Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/logs/users-8-0.log
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
+ create_infra users-28990
+ local ns=users-28990
+ '[' -n pxc-operator ']'
+ kubectl get pxc --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl patch pxc -n users-26321 some-name --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.zZWt7kaRfx
++ mktemp
+ local LAST_ERR=/tmp/tmp.E1mhdQS4bY
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.zZWt7kaRfx
perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-26321 namespace
+ cat /tmp/tmp.E1mhdQS4bY
+ rm /tmp/tmp.zZWt7kaRfx /tmp/tmp.E1mhdQS4bY
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.nUfYz4Cpsd
++ mktemp
+ local LAST_ERR=/tmp/tmp.igekFkSefK
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.nUfYz4Cpsd
No resources found
+ cat /tmp/tmp.igekFkSefK
+ rm /tmp/tmp.nUfYz4Cpsd /tmp/tmp.igekFkSefK
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.FNbo2gnM7j
++ mktemp
+ local LAST_ERR=/tmp/tmp.8jKznk5iuf
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.FNbo2gnM7j
No resources found
+ cat /tmp/tmp.8jKznk5iuf
+ rm /tmp/tmp.FNbo2gnM7j /tmp/tmp.8jKznk5iuf
+ return 0
+ create_namespace pxc-operator
+ local namespace=pxc-operator
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ awk '-F ' '{print $2}'
++ tail -n1
++ sed s/NAMESPACE//
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get MutatingWebhookConfiguration
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ awk '{print $1}'
++ grep chaos-mesh
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ awk '{print $1}'
++ kubectl get ValidatingWebhookConfiguration
++ grep validate-auth
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl api-resources
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get crd
++ awk '{print $1}'
++ grep chaos-mesh.org
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrole
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
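Every kubectl_bin call traced above follows the same shape: stdout and stderr are captured to mktemp files, the command is retried up to three times, and the captured output is echoed back before the temp files are removed. A minimal sketch of the wrapper as it can be reconstructed from this trace (the real helper lives in e2e-tests/functions and may differ in details such as the back-off between attempts):

kubectl_bin() {
  local LAST_OUT LAST_ERR exit_status i
  LAST_OUT=$(mktemp)
  LAST_ERR=$(mktemp)
  exit_status=0
  for i in $(seq 0 2); do
    set +e
    kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
    exit_status=$?
    set -e
    if [ $exit_status != 0 ]; then
      sleep 0   # the trace shows "sleep 0", i.e. no real delay between retries
      continue
    fi
    break
  done
  cat "$LAST_OUT"
  cat "$LAST_ERR"
  rm "$LAST_OUT" "$LAST_ERR"
  return $exit_status
}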
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ kubectl_bin get ns
+ grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces pxc-operator'
+ xargs kubectl delete ns
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace pxc-operator
++ mktemp
++ mktemp
+ awk '{print$1}'
+ local LAST_OUT=/tmp/tmp.pczsK0WTEq
++ mktemp
+ local LAST_OUT=/tmp/tmp.VQPmYBl9nw
++ mktemp
+ local LAST_ERR=/tmp/tmp.FF6JxyOaVf
+ local exit_status=0
++ seq 0 2
+ local LAST_ERR=/tmp/tmp.gymak2OGnH
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get ns
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.pczsK0WTEq
+ cat /tmp/tmp.FF6JxyOaVf
+ rm /tmp/tmp.pczsK0WTEq /tmp/tmp.FF6JxyOaVf
+ return 0
namespace "users-26321" deleted
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.VQPmYBl9nw
namespace "pxc-operator" deleted
+ cat /tmp/tmp.gymak2OGnH
+ rm /tmp/tmp.VQPmYBl9nw /tmp/tmp.gymak2OGnH
+ return 0
+ wait_for_delete namespace/pxc-operator
+ local res=namespace/pxc-operator
+ echo -n 'waiting for namespace/pxc-operator to be deleted'
waiting for namespace/pxc-operator to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "pxc-operator" not found
+ desc 'create namespace pxc-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.CF1pe0OKpk
++ mktemp
+ local LAST_ERR=/tmp/tmp.lqwiujpmnP
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.CF1pe0OKpk
namespace/pxc-operator created
+ cat /tmp/tmp.lqwiujpmnP
+ rm /tmp/tmp.CF1pe0OKpk /tmp/tmp.lqwiujpmnP
+ return 0
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.WsZGdKgUwT
+++ mktemp
++ local LAST_ERR=/tmp/tmp.zLyDth4qRV
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.WsZGdKgUwT
++ cat /tmp/tmp.zLyDth4qRV
++ rm /tmp/tmp.WsZGdKgUwT /tmp/tmp.zLyDth4qRV
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2124-f48cc30b-12-cluster1 --namespace=pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.otH1nPJWSc
++ mktemp
+ local LAST_ERR=/tmp/tmp.qWVzJkf2mZ
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2124-f48cc30b-12-cluster1 --namespace=pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.otH1nPJWSc
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2124-f48cc30b-12-cluster1" modified.
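Each "error: resource(s) were provided, but no name was specified" in the destroy_chaos_mesh step is benign: the script pipes kubectl get through grep chaos-mesh and awk into kubectl delete, and on a cluster with no chaos-mesh leftovers the name list is empty, so kubectl refuses and the script swallows the failure (the "+ :" after each error). A guarded equivalent that avoids the noise (illustrative only, not the project's code):

for kind in MutatingWebhookConfiguration ValidatingWebhookConfiguration clusterrolebinding clusterrole; do
  names=$(kubectl get "$kind" 2>/dev/null | grep chaos-mesh | awk '{print $1}')
  # only invoke delete when grep actually matched something; $names is split on purpose
  [ -n "$names" ] && timeout 30 kubectl delete "$kind" $names || :
done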
+ cat /tmp/tmp.qWVzJkf2mZ
+ rm /tmp/tmp.otH1nPJWSc /tmp/tmp.qWVzJkf2mZ
+ return 0
+ deploy_operator
+ desc 'start PXC operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
start PXC operator
-----------------------------------------------------------------------------------
+ kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/deploy/crd.yaml
++ mktemp
+ local LAST_OUT=/tmp/tmp.iIDV3837xY
++ mktemp
+ local LAST_ERR=/tmp/tmp.EHPV5Zh2L4
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/deploy/crd.yaml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.iIDV3837xY
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied
+ cat /tmp/tmp.EHPV5Zh2L4
+ rm /tmp/tmp.iIDV3837xY /tmp/tmp.EHPV5Zh2L4
+ return 0
+ '[' -n pxc-operator ']'
+ apply_rbac cw-rbac
+ local operator_namespace=pxc-operator
+ local rbac=cw-rbac
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/deploy/cw-rbac.yaml
+ sed -e 's^namespace: .*^namespace: pxc-operator^'
+ kubectl_bin apply -f -
++ mktemp
+ local LAST_OUT=/tmp/tmp.zvX46LdA0L
++ mktemp
+ local LAST_ERR=/tmp/tmp.KgQGz3GrEv
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.zvX46LdA0L
clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged
serviceaccount/percona-xtradb-cluster-operator created
clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged
+ cat /tmp/tmp.KgQGz3GrEv
+ rm /tmp/tmp.zvX46LdA0L /tmp/tmp.KgQGz3GrEv
+ return 0
+ sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2124-f48cc30b^'
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/deploy/cw-operator.yaml
+ sed -e 's^failureThreshold: .*^failureThreshold: 10^'
+ yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' -
+ yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' -
+ kubectl_bin apply -f -
++ mktemp
+ local LAST_OUT=/tmp/tmp.0xNRz9Qu3O
++ mktemp
+ local LAST_ERR=/tmp/tmp.O1DTOXBdwC
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.0xNRz9Qu3O
deployment.apps/percona-xtradb-cluster-operator created
service/percona-xtradb-cluster-operator created
+ cat /tmp/tmp.O1DTOXBdwC
+ rm /tmp/tmp.0xNRz9Qu3O /tmp/tmp.O1DTOXBdwC
+ return 0
+ sleep 10
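deploy_operator customizes deploy/cw-operator.yaml in a sed/yq pipeline before applying it: the image is pinned to the PR build, the probe failureThreshold is raised, and the DISABLE_TELEMETRY and LOG_LEVEL env vars are rewritten. The pipeline from the trace, reassembled into one standalone command (yq v4 syntax, as used in the log):

cat deploy/cw-operator.yaml \
  | sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2124-f48cc30b^' \
  | sed -e 's^failureThreshold: .*^failureThreshold: 10^' \
  | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[]
      | select(.name == "percona-xtradb-cluster-operator").env[]
      | select(.name == "DISABLE_TELEMETRY").value) = "true"' - \
  | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[]
      | select(.name == "percona-xtradb-cluster-operator").env[]
      | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - \
  | kubectl apply -f -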
+ kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s
++ mktemp
+ local LAST_OUT=/tmp/tmp.hR94HyV6xF
++ mktemp
+ local LAST_ERR=/tmp/tmp.efrcno3yje
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.hR94HyV6xF
pod/percona-xtradb-cluster-operator-67bfc9d88f-wrxz2 condition met
+ cat /tmp/tmp.efrcno3yje
+ rm /tmp/tmp.hR94HyV6xF /tmp/tmp.efrcno3yje
+ return 0
++ get_operator_pod
++ local label_prefix=app.kubernetes.io/
+++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator
+++ grep -c percona-xtradb-cluster-operator
++ local check_label=1
++ [[ 1 -eq 0 ]]
++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
+++ mktemp
++ local LAST_OUT=/tmp/tmp.KGmb1z2hVZ
+++ mktemp
++ local LAST_ERR=/tmp/tmp.xHy9JjiV5J
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.KGmb1z2hVZ
++ cat /tmp/tmp.xHy9JjiV5J
++ rm /tmp/tmp.KGmb1z2hVZ /tmp/tmp.xHy9JjiV5J
++ return 0
+ wait_pod percona-xtradb-cluster-operator-67bfc9d88f-wrxz2 480 pxc-operator
+ local pod=percona-xtradb-cluster-operator-67bfc9d88f-wrxz2
+ local max_retry=480
+ local ns=pxc-operator
++ echo percona-xtradb-cluster-operator-67bfc9d88f-wrxz2
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/percona-xtradb-cluster-operator-67bfc9d88f-wrxz2 condition met
waiting for pod/percona-xtradb-cluster-operator-67bfc9d88f-wrxz2 to become Ready.Ok
+ sleep 3
+ create_namespace users-28990
+ local namespace=users-28990
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ awk '-F ' '{print $2}'
++ sed s/NAMESPACE//
++ tail -n1
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ kubectl get MutatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep validate-auth
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl api-resources
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get crd
++ grep chaos-mesh.org
++ awk '{print $1}'
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrole
++ awk '{print $1}'
++ grep chaos-mesh
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
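wait_pod derives which container to watch from the pod name alone: the sed/grep pair visible in the trace yields "pxc" or "proxysql" for StatefulSet pods and an empty string for anything else, which is why container= stays empty for the operator pod above. That derivation in isolation:

container_of() {
  # prints "pxc" or "proxysql" for cluster StatefulSet pods, nothing otherwise
  echo "$1" | /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' | grep -E '^(pxc|proxysql)$'
}
container_of some-name-pxc-0                                  # -> pxc
container_of some-name-proxysql-0                             # -> proxysql
container_of percona-xtradb-cluster-operator-67bfc9d88f-wrxz2 # -> (empty)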
+ grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
+ awk '{print$1}'
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces users-28990'
+ xargs kubectl delete ns
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces users-28990
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace users-28990
+ kubectl_bin get ns
++ mktemp
++ mktemp
+ local LAST_OUT=/tmp/tmp.YgpSL0W19w
+ local LAST_OUT=/tmp/tmp.kMvyGtqBv3
++ mktemp
++ mktemp
+ local LAST_ERR=/tmp/tmp.6TT11OYrVx
+ local exit_status=0
+ local LAST_ERR=/tmp/tmp.Z23mCnPDq3
+ local exit_status=0
++ seq 0 2
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get ns
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace users-28990
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace users-28990
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.YgpSL0W19w
+ cat /tmp/tmp.6TT11OYrVx
+ rm /tmp/tmp.YgpSL0W19w /tmp/tmp.6TT11OYrVx
+ return 0
error: resource(s) were provided, but no name was specified
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace users-28990
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ cat /tmp/tmp.kMvyGtqBv3
+ cat /tmp/tmp.Z23mCnPDq3
Error from server (NotFound): namespaces "users-28990" not found
+ rm /tmp/tmp.kMvyGtqBv3 /tmp/tmp.Z23mCnPDq3
+ return 1
+ :
+ wait_for_delete namespace/users-28990
+ local res=namespace/users-28990
+ echo -n 'waiting for namespace/users-28990 to be deleted'
waiting for namespace/users-28990 to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "users-28990" not found
+ desc 'create namespace users-28990'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace users-28990
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace users-28990
++ mktemp
+ local LAST_OUT=/tmp/tmp.6OExvywjtI
++ mktemp
+ local LAST_ERR=/tmp/tmp.hPr1CQfLnD
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace users-28990
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.6OExvywjtI
namespace/users-28990 created
+ cat /tmp/tmp.hPr1CQfLnD
+ rm /tmp/tmp.6OExvywjtI /tmp/tmp.hPr1CQfLnD
+ return 0
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.qMBTtbyIAk
+++ mktemp
++ local LAST_ERR=/tmp/tmp.ibyqvJeoyu
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.qMBTtbyIAk
++ cat /tmp/tmp.ibyqvJeoyu
++ rm /tmp/tmp.qMBTtbyIAk /tmp/tmp.ibyqvJeoyu
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2124-f48cc30b-12-cluster1 --namespace=users-28990
++ mktemp
+ local LAST_OUT=/tmp/tmp.WjJpl9cOe9
++ mktemp
+ local LAST_ERR=/tmp/tmp.eQDtdQxLlU
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2124-f48cc30b-12-cluster1 --namespace=users-28990
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.WjJpl9cOe9
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2124-f48cc30b-12-cluster1" modified.
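wait_for_delete's loop body is hidden by set +o xtrace, but its observable behavior in both occurrences above (print "waiting for <res> to be deleted", then poll until kubectl get reports NotFound) suggests roughly the following shape; this is a reconstruction, not the actual helper:

wait_for_delete() {
  local res=$1
  echo -n "waiting for $res to be deleted"
  # poll until the resource is gone; the final NotFound error in the log is expected
  while kubectl get "$res" >/dev/null 2>&1; do
    echo -n .
    sleep 1
  done
}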
+ cat /tmp/tmp.eQDtdQxLlU
+ rm /tmp/tmp.WjJpl9cOe9 /tmp/tmp.eQDtdQxLlU
+ return 0
+ apply_secrets
+ desc 'create secrets for cloud storages'
+ set +o xtrace
-----------------------------------------------------------------------------------
create secrets for cloud storages
-----------------------------------------------------------------------------------
+ '[' -z '' ']'
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/cloud-secret.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.GE8hl3tJ43
++ mktemp
+ local LAST_ERR=/tmp/tmp.JSOW06ottx
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/cloud-secret.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.GE8hl3tJ43
secret/minio-secret created
secret/aws-s3-secret created
secret/gcp-cs-secret created
secret/azure-secret created
+ cat /tmp/tmp.JSOW06ottx
+ rm /tmp/tmp.GE8hl3tJ43 /tmp/tmp.JSOW06ottx
+ return 0
+ desc 'create PXC cluster with 1-password secret'
+ set +o xtrace
-----------------------------------------------------------------------------------
create PXC cluster with 1-password secret
-----------------------------------------------------------------------------------
+ newpass=test-password
++ echo -n test-password
++ base64
+ newpassencrypted=dGVzdC1wYXNzd29yZA==
+ cluster=some-name
+ spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/some-name.yml '' '' /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/secrets_one_pass.yml
+ local cluster=some-name
+ local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/some-name.yml
+ local size=3
+ local sleep=10
+ local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/secrets_one_pass.yml
+ local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/client.yml
+ local port=3306
+ desc 'create first PXC cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
create first PXC cluster
-----------------------------------------------------------------------------------
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/secrets_one_pass.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.3303NhoonG
++ mktemp
+ local LAST_ERR=/tmp/tmp.Ca4jWFZ22n
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/secrets_one_pass.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.3303NhoonG
secret/my-cluster-secrets created
secret/some-name-ssl created
secret/some-name-ssl-internal created
+ cat /tmp/tmp.Ca4jWFZ22n
+ rm /tmp/tmp.3303NhoonG /tmp/tmp.Ca4jWFZ22n
+ return 0
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/client.yml
+ '[' -z '' ']'
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/client.yml
+ kubectl_bin apply -f -
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/client.yml
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
++ mktemp
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#'
+ local LAST_OUT=/tmp/tmp.xT01YeyIke
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#'
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2124-f48cc30b#'
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-28990~
++ mktemp
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ local LAST_ERR=/tmp/tmp.orHCvMPZpm
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.xT01YeyIke
deployment.apps/pxc-client created
+ cat /tmp/tmp.orHCvMPZpm
+ rm /tmp/tmp.xT01YeyIke /tmp/tmp.orHCvMPZpm
+ return 0
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/some-name.yml
+ '[' -z '' ']'
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/some-name.yml
+ kubectl_bin apply -f -
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/some-name.yml
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
++ mktemp
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ local LAST_OUT=/tmp/tmp.3pZmWwbEZ0
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2124-f48cc30b#'
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#'
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
++ mktemp
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-28990~
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#'
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ local LAST_ERR=/tmp/tmp.gxFeGw42dH
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.3pZmWwbEZ0
perconaxtradbcluster.pxc.percona.com/some-name created
+ cat /tmp/tmp.gxFeGw42dH
+ rm /tmp/tmp.3pZmWwbEZ0 /tmp/tmp.gxFeGw42dH
+ return 0
+ desc 'check if all 3 Pods started'
+ set +o xtrace
-----------------------------------------------------------------------------------
check if all 3 Pods started
-----------------------------------------------------------------------------------
++ get_proxy some-name
++ local target_cluster=some-name
+++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}'
++++ mktemp
+++ local LAST_OUT=/tmp/tmp.buxRPVmjVJ
++++ mktemp
+++ local LAST_ERR=/tmp/tmp.LQM4Ne20fC
+++ local exit_status=0
++++ seq 0 2
+++ for i in '$(seq 0 2)'
+++ set +e
+++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}'
+++ exit_status=0
+++ set -e
+++ '[' 0 '!=' 0 ']'
+++ break
+++ cat /tmp/tmp.buxRPVmjVJ
+++ cat /tmp/tmp.LQM4Ne20fC
+++ rm /tmp/tmp.buxRPVmjVJ /tmp/tmp.LQM4Ne20fC
+++ return 0
++ [[ '' == \t\r\u\e ]]
+++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}'
++++ mktemp
+++ local LAST_OUT=/tmp/tmp.UaVZ85nrNd
++++ mktemp
+++ local LAST_ERR=/tmp/tmp.6slDEv62NM
+++ local exit_status=0
++++ seq 0 2
+++ for i in '$(seq 0 2)'
+++ set +e
+++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}'
+++ exit_status=0
+++ set -e
+++ '[' 0 '!=' 0 ']'
+++ break
+++ cat /tmp/tmp.UaVZ85nrNd
+++ cat /tmp/tmp.6slDEv62NM
+++ rm /tmp/tmp.UaVZ85nrNd /tmp/tmp.6slDEv62NM
+++ return 0
++ [[ true == \t\r\u\e ]]
++ echo some-name-proxysql
++ return
+ local proxy=some-name-proxysql
+ kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-28990
++ mktemp
+ local LAST_OUT=/tmp/tmp.5h8uF4N3cv
++ mktemp
+ local LAST_ERR=/tmp/tmp.pyijtFgham
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-28990
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-28990
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-28990
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ cat /tmp/tmp.5h8uF4N3cv
+ cat /tmp/tmp.pyijtFgham
error: no matching resources found
+ rm /tmp/tmp.5h8uF4N3cv /tmp/tmp.pyijtFgham
+ return 1
+ true
+ wait_for_running some-name-proxysql 1
+ local name=some-name-proxysql
+ let last_pod=0
+ :
+ local max_retry=480
+ desc 'wait for running cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
++ seq 0 0
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-proxysql-0 480
+ local pod=some-name-proxysql-0
+ local max_retry=480
+ local ns=
++ echo some-name-proxysql-0
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=proxysql
+ set +o xtrace
pod/some-name-proxysql-0 condition met
waiting for pod/some-name-proxysql-0 to become Ready.Ok
+ wait_for_running some-name-pxc 3
+ local name=some-name-pxc
+ let last_pod=2
+ local max_retry=480
+ desc 'wait for running cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
++ seq 0 2
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-pxc-0 480
+ local pod=some-name-pxc-0
+ local max_retry=480
+ local ns=
++ echo some-name-pxc-0
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=pxc
+ set +o xtrace
pod/some-name-pxc-0 condition met
waiting for pod/some-name-pxc-0 to become Ready.Ok
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-pxc-1 480
+ local pod=some-name-pxc-1
+ local max_retry=480
+ local ns=
++ echo some-name-pxc-1
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=pxc
+ set +o xtrace
pod/some-name-pxc-1 condition met
waiting for pod/some-name-pxc-1 to become Ready.Ok
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-pxc-2 480
+ local pod=some-name-pxc-2
+ local max_retry=480
+ local ns=
++ echo some-name-pxc-2
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=pxc
+ set +o xtrace
pod/some-name-pxc-2 condition met
waiting for pod/some-name-pxc-2 to become Ready.Ok
+ sleep 10
++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}'
+ local secret_name=my-cluster-secrets
++ getSecretData my-cluster-secrets root
++ local secretName=my-cluster-secrets
++ local dataKey=root
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.y0A1OGUZzo
+++ mktemp
++ local LAST_ERR=/tmp/tmp.7Ngcx8J1RT
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.y0A1OGUZzo
++ cat /tmp/tmp.7Ngcx8J1RT
++ rm /tmp/tmp.y0A1OGUZzo /tmp/tmp.7Ngcx8J1RT
++ return 0
+ local 'root_pass=0jU$!m[>1Km!q@t0nf'
+ desc 'write data'
+ set +o xtrace
-----------------------------------------------------------------------------------
write data
-----------------------------------------------------------------------------------
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''0jU$!m[>1Km!q@t0nf'\'' -P3306'
+ local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;'
+ local 'uri=-h some-name-proxysql -uroot -p'\''0jU$!m[>1Km!q@t0nf'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.ZMXRPmlyrV
+++ mktemp
++ local LAST_ERR=/tmp/tmp.wlv1WbXoJH
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.ZMXRPmlyrV
++ cat /tmp/tmp.wlv1WbXoJH
++ rm /tmp/tmp.ZMXRPmlyrV /tmp/tmp.wlv1WbXoJH
++ return 0
+ client_pod=pxc-client-59944c5bbf-wqbt7
+ wait_pod pxc-client-59944c5bbf-wqbt7
+ local pod=pxc-client-59944c5bbf-wqbt7
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-wqbt7
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-wqbt7 condition met
waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''0jU$!m[>1Km!q@t0nf'\'' -P3306'
+ local 'command=INSERT myApp.myApp (id) VALUES (100500)'
+ local 'uri=-h some-name-proxysql -uroot -p'\''0jU$!m[>1Km!q@t0nf'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.2K7lYmGFlf
+++ mktemp
++ local LAST_ERR=/tmp/tmp.tQ4U1Dxdhu
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.2K7lYmGFlf
++ cat /tmp/tmp.tQ4U1Dxdhu
++ rm /tmp/tmp.2K7lYmGFlf /tmp/tmp.tQ4U1Dxdhu
++ return 0
+ client_pod=pxc-client-59944c5bbf-wqbt7
+ wait_pod pxc-client-59944c5bbf-wqbt7
+ local pod=pxc-client-59944c5bbf-wqbt7
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-wqbt7
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-wqbt7 condition met
waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ sleep 30
++ seq 0 2
+ for i in '$(seq 0 $((size - 1)))'
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''0jU$!m[>1Km!q@t0nf'\'' -P3306'
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''0jU$!m[>1Km!q@t0nf'\'' -P3306'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-1-80.sql ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''0jU$!m[>1Km!q@t0nf'\'' -P3306'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''0jU$!m[>1Km!q@t0nf'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.PA5QQn3KzL
+++ mktemp
++ local LAST_ERR=/tmp/tmp.LTRysxknlC
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.PA5QQn3KzL
++ cat /tmp/tmp.LTRysxknlC
++ rm /tmp/tmp.PA5QQn3KzL /tmp/tmp.LTRysxknlC
++ return 0
+ client_pod=pxc-client-59944c5bbf-wqbt7
+ wait_pod pxc-client-59944c5bbf-wqbt7
+ local pod=pxc-client-59944c5bbf-wqbt7
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-wqbt7
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-wqbt7 condition met
waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
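run_mysql switches off xtrace before executing, so the statement execution itself never appears in the log. From its arguments (a SQL command plus a "-h ... -u... -p..." URI) and the "Defaulted container \"pxc-client\"" message, it evidently execs mysql inside the pxc-client deployment pod. A plausible sketch; this is a hypothetical reconstruction, and the real helper's flags and quoting may differ:

run_mysql() {
  local command=$1 uri=$2
  local client_pod
  client_pod=$(kubectl get pods --selector=name=pxc-client \
    -o 'jsonpath={.items[].metadata.name}')
  # -sN: silent, no column headers, so output can be diffed against *.sql fixtures
  kubectl exec "$client_pod" -- \
    sh -c "mysql -sN $uri -e \"$command\""   # hypothetical invocation
}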
+ '[' '!' -s /tmp/tmp.30P5AqZU1r/select-1.sql ']'
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-1.sql /tmp/tmp.30P5AqZU1r/select-1.sql
+ for i in '$(seq 0 $((size - 1)))'
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''0jU$!m[>1Km!q@t0nf'\'' -P3306'
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''0jU$!m[>1Km!q@t0nf'\'' -P3306'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-1-80.sql ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''0jU$!m[>1Km!q@t0nf'\'' -P3306'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''0jU$!m[>1Km!q@t0nf'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.M38dvVl4lX
+++ mktemp
++ local LAST_ERR=/tmp/tmp.wiefbcP7cg
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.M38dvVl4lX
++ cat /tmp/tmp.wiefbcP7cg
++ rm /tmp/tmp.M38dvVl4lX /tmp/tmp.wiefbcP7cg
++ return 0
+ client_pod=pxc-client-59944c5bbf-wqbt7
+ wait_pod pxc-client-59944c5bbf-wqbt7
+ local pod=pxc-client-59944c5bbf-wqbt7
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-wqbt7
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-wqbt7 condition met
waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.30P5AqZU1r/select-1.sql ']'
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-1.sql /tmp/tmp.30P5AqZU1r/select-1.sql
+ for i in '$(seq 0 $((size - 1)))'
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''0jU$!m[>1Km!q@t0nf'\'' -P3306'
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''0jU$!m[>1Km!q@t0nf'\'' -P3306'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-1-80.sql ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''0jU$!m[>1Km!q@t0nf'\'' -P3306'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''0jU$!m[>1Km!q@t0nf'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.VXLlEpbEuT
+++ mktemp
++ local LAST_ERR=/tmp/tmp.CMouwqTFD2
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.VXLlEpbEuT
++ cat /tmp/tmp.CMouwqTFD2
++ rm /tmp/tmp.VXLlEpbEuT /tmp/tmp.CMouwqTFD2
++ return 0
+ client_pod=pxc-client-59944c5bbf-wqbt7
+ wait_pod pxc-client-59944c5bbf-wqbt7
+ local pod=pxc-client-59944c5bbf-wqbt7
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-wqbt7
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-wqbt7 condition met
waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.30P5AqZU1r/select-1.sql ']'
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-1.sql /tmp/tmp.30P5AqZU1r/select-1.sql
++ is_keyring_plugin_in_use some-name
++ local cluster=some-name
++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf'
++ grep -E -o 'early-plugin-load=keyring_\w+.so'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.JdP80FGuZk
+++ mktemp
++ local LAST_ERR=/tmp/tmp.fh8fddNxdu
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.JdP80FGuZk
++ cat /tmp/tmp.fh8fddNxdu
Unable to use a TTY - input is not a terminal or the right kind of file
++ rm /tmp/tmp.JdP80FGuZk /tmp/tmp.fh8fddNxdu
++ return 0
+ '[' '' ']'
+ desc 'test missing passwords were created and present in internal secrets'
+ set +o xtrace
-----------------------------------------------------------------------------------
test missing passwords were created and present in internal secrets
-----------------------------------------------------------------------------------
+ empty_pwds=()
+ wrong_pwds=()
+ for user in root xtrabackup monitor proxyadmin operator replication
+ echo 'Checking root'
Checking root
++ getSecretData my-cluster-secrets root
++ local secretName=my-cluster-secrets
++ local dataKey=root
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.tiYGHfyMpC
+++ mktemp
++ local LAST_ERR=/tmp/tmp.ZpDuTUgKD9
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.tiYGHfyMpC
++ cat /tmp/tmp.ZpDuTUgKD9
++ rm /tmp/tmp.tiYGHfyMpC /tmp/tmp.ZpDuTUgKD9
++ return 0
+ secret_pass='0jU$!m[>1Km!q@t0nf'
++ getSecretData internal-some-name root
++ local secretName=internal-some-name
++ local dataKey=root
++ kubectl_bin get secrets/internal-some-name '--template={{.data.root}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.owEIxLNOmV
+++ mktemp
++ local LAST_ERR=/tmp/tmp.Wy4wvJTuSI
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/internal-some-name '--template={{.data.root}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.owEIxLNOmV
++ cat /tmp/tmp.Wy4wvJTuSI
++ rm /tmp/tmp.owEIxLNOmV /tmp/tmp.Wy4wvJTuSI
++ return 0
+ int_secret_pass='0jU$!m[>1Km!q@t0nf'
+ [[ -z 0jU$!m[>1Km!q@t0nf ]]
+ [[ 0jU$!m[>1Km!q@t0nf != \0\j\U\$\!\m\[\>\1\K\m\!\q\@\t\0\n\f ]]
+ [[ root != \p\r\o\x\y\a\d\m\i\n ]]
+ [[ '' =~ root ]]
+ [[ '' =~ root ]]
+ echo 'Running compare for root'
Running compare for root
+ compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''0jU$!m[>1Km!q@t0nf'\'''
+ local command_id=select-4
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -uroot -p'\''0jU$!m[>1Km!q@t0nf'\'''
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql ]]
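getSecretData, used here and for every user below, is fully visible in the trace: it reads one key of a Secret via a go-template and base64-decodes it; the test then compares the user-facing Secret against the operator's internal copy. Extracted as a standalone helper:

getSecretData() {
  local secretName=$1 dataKey=$2
  kubectl get "secrets/$secretName" "--template={{.data.$dataKey}}" | base64 --decode
}
# the check performed for each user in the loop below:
secret_pass=$(getSecretData my-cluster-secrets root)
int_secret_pass=$(getSecretData internal-some-name root)
[ "$secret_pass" = "$int_secret_pass" ] || echo "password mismatch for root"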
+ expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql
+ run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''0jU$!m[>1Km!q@t0nf'\'''
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -uroot -p'\''0jU$!m[>1Km!q@t0nf'\'''
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.0ZBvjn5HQE
+++ mktemp
++ local LAST_ERR=/tmp/tmp.fSpz4suVpG
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.0ZBvjn5HQE
++ cat /tmp/tmp.fSpz4suVpG
++ rm /tmp/tmp.0ZBvjn5HQE /tmp/tmp.fSpz4suVpG
++ return 0
+ client_pod=pxc-client-59944c5bbf-wqbt7
+ wait_pod pxc-client-59944c5bbf-wqbt7
+ local pod=pxc-client-59944c5bbf-wqbt7
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-wqbt7
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-wqbt7 condition met
waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.30P5AqZU1r/select-4.sql ']'
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.30P5AqZU1r/select-4.sql
+ for user in root xtrabackup monitor proxyadmin operator replication
+ echo 'Checking xtrabackup'
Checking xtrabackup
++ getSecretData my-cluster-secrets xtrabackup
++ local secretName=my-cluster-secrets
++ local dataKey=xtrabackup
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.QM8HNnirFQ
+++ mktemp
++ local LAST_ERR=/tmp/tmp.lBXH4uRGZe
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.QM8HNnirFQ
++ cat /tmp/tmp.lBXH4uRGZe
++ rm /tmp/tmp.QM8HNnirFQ /tmp/tmp.lBXH4uRGZe
++ return 0
+ secret_pass='L.iP,wR_i2oHj8_%]'
++ getSecretData internal-some-name xtrabackup
++ local secretName=internal-some-name
++ local dataKey=xtrabackup
++ kubectl_bin get secrets/internal-some-name '--template={{.data.xtrabackup}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.mHqFsbsNLE
+++ mktemp
++ local LAST_ERR=/tmp/tmp.MQ0JpY8Ryy
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/internal-some-name '--template={{.data.xtrabackup}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.mHqFsbsNLE
++ cat /tmp/tmp.MQ0JpY8Ryy
++ rm /tmp/tmp.mHqFsbsNLE /tmp/tmp.MQ0JpY8Ryy
++ return 0
+ int_secret_pass='L.iP,wR_i2oHj8_%]'
+ [[ -z L.iP,wR_i2oHj8_%] ]]
+ [[ L.iP,wR_i2oHj8_%] != \L\.\i\P\,\w\R\_\i\2\o\H\j\8\_\%\] ]]
+ [[ xtrabackup != \p\r\o\x\y\a\d\m\i\n ]]
+ [[ '' =~ xtrabackup ]]
+ [[ '' =~ xtrabackup ]]
+ echo 'Running compare for xtrabackup'
Running compare for xtrabackup
+ compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''L.iP,wR_i2oHj8_%]'\'''
+ local command_id=select-4
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -uxtrabackup -p'\''L.iP,wR_i2oHj8_%]'\'''
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql ]]
+ expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql
+ run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''L.iP,wR_i2oHj8_%]'\'''
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -uxtrabackup -p'\''L.iP,wR_i2oHj8_%]'\'''
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.iCAmZb871B
+++ mktemp
++ local LAST_ERR=/tmp/tmp.XXYrh5oF7Y
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.iCAmZb871B
++ cat /tmp/tmp.XXYrh5oF7Y
++ rm /tmp/tmp.iCAmZb871B /tmp/tmp.XXYrh5oF7Y
++ return 0
+ client_pod=pxc-client-59944c5bbf-wqbt7
+ wait_pod pxc-client-59944c5bbf-wqbt7
+ local pod=pxc-client-59944c5bbf-wqbt7
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-wqbt7
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-wqbt7 condition met
waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.30P5AqZU1r/select-4.sql ']'
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.30P5AqZU1r/select-4.sql
+ for user in root xtrabackup monitor proxyadmin operator replication
+ echo 'Checking monitor'
Checking monitor
++ getSecretData my-cluster-secrets monitor
++ local secretName=my-cluster-secrets
++ local dataKey=monitor
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.monitor}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.0BIKsp0aYy
+++ mktemp
++ local LAST_ERR=/tmp/tmp.rWxCLVbtt1
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.monitor}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.0BIKsp0aYy
++ cat /tmp/tmp.rWxCLVbtt1
++ rm /tmp/tmp.0BIKsp0aYy /tmp/tmp.rWxCLVbtt1
++ return 0
+ secret_pass=monitor_password
++ getSecretData internal-some-name monitor
++ local secretName=internal-some-name
++ local dataKey=monitor
++ kubectl_bin get secrets/internal-some-name '--template={{.data.monitor}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.1JUlsIziwd
+++ mktemp
++ local LAST_ERR=/tmp/tmp.6xHJ1XLPP4
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/internal-some-name '--template={{.data.monitor}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.1JUlsIziwd
++ cat /tmp/tmp.6xHJ1XLPP4
++ rm /tmp/tmp.1JUlsIziwd /tmp/tmp.6xHJ1XLPP4
++ return 0
+ int_secret_pass=monitor_password
+ [[ -z monitor_password ]]
+ [[ monitor_password != \m\o\n\i\t\o\r\_\p\a\s\s\w\o\r\d ]]
+ [[ monitor != \p\r\o\x\y\a\d\m\i\n ]]
+ [[ '' =~ monitor ]]
+ [[ '' =~ monitor ]]
+ echo 'Running compare for monitor'
Running compare for monitor
+ compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\'''
+ local command_id=select-4
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\'''
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql ]]
+ expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql
+ run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\'''
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\'''
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.UGyQ5SwIeE
+++ mktemp
++ local LAST_ERR=/tmp/tmp.W7zotG0V56
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.UGyQ5SwIeE
++ cat /tmp/tmp.W7zotG0V56
++ rm /tmp/tmp.UGyQ5SwIeE /tmp/tmp.W7zotG0V56
++ return 0
+ client_pod=pxc-client-59944c5bbf-wqbt7
+ wait_pod pxc-client-59944c5bbf-wqbt7
+ local pod=pxc-client-59944c5bbf-wqbt7
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-wqbt7
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-wqbt7 condition met
waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.30P5AqZU1r/select-4.sql ']'
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.30P5AqZU1r/select-4.sql
+ for user in root xtrabackup monitor proxyadmin operator replication
+ echo 'Checking proxyadmin'
Checking proxyadmin
++ getSecretData my-cluster-secrets proxyadmin
++ local secretName=my-cluster-secrets
++ local dataKey=proxyadmin
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.rRFQ8raLVW
+++ mktemp
++ local LAST_ERR=/tmp/tmp.TbGw4E2CuC
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.rRFQ8raLVW
++ cat /tmp/tmp.TbGw4E2CuC
++ rm /tmp/tmp.rRFQ8raLVW /tmp/tmp.TbGw4E2CuC
++ return 0
+ secret_pass='vjmfmWRj(YjN-brW>K{'
++ getSecretData internal-some-name proxyadmin
++ local secretName=internal-some-name
++ local dataKey=proxyadmin
++ kubectl_bin get secrets/internal-some-name '--template={{.data.proxyadmin}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.4SJGsJkvGj
+++ mktemp
++ local LAST_ERR=/tmp/tmp.VivEzBcU6H
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/internal-some-name '--template={{.data.proxyadmin}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.4SJGsJkvGj
++ cat /tmp/tmp.VivEzBcU6H
++ rm /tmp/tmp.4SJGsJkvGj /tmp/tmp.VivEzBcU6H
++ return 0
+ int_secret_pass='vjmfmWRj(YjN-brW>K{'
+ [[ -z vjmfmWRj(YjN-brW>K{ ]]
+ [[ vjmfmWRj(YjN-brW>K{ != \v\j\m\f\m\W\R\j\(\Y\j\N\-\b\r\W\>\K\{ ]]
+ [[ proxyadmin != \p\r\o\x\y\a\d\m\i\n ]]
+ [[ proxyadmin == \p\r\o\x\y\a\d\m\i\n ]]
+ [[ '' =~ proxyadmin ]]
+ [[ '' =~ proxyadmin ]]
+ echo 'Running compare for proxyadmin'
Running compare for proxyadmin
+ compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''vjmfmWRj(YjN-brW>K{'\''' some-name-proxysql-0 '' proxysql
+ local command_id=select-2
+ local 'command=SHOW TABLES;'
+ local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''vjmfmWRj(YjN-brW>K{'\'''
+ local pod=some-name-proxysql-0
+ local postfix=
+ local container_name=proxysql
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-2.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-2-80.sql ']'
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''vjmfmWRj(YjN-brW>K{'\''' some-name-proxysql-0 proxysql
+ local 'command=SHOW TABLES;'
+ local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''vjmfmWRj(YjN-brW>K{'\'''
+ local pod=some-name-proxysql-0
+ local container_name=proxysql
+ set +o xtrace
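proxyadmin is the one user that cannot be checked through ProxySQL's MySQL port, so compare_mysql_cmd_local runs the query inside the proxysql container against the admin interface on 127.0.0.1:6032. run_mysql_local's body is again hidden by set +o xtrace; given its arguments (command, uri, pod, container) it is presumably close to the following, a hypothetical sketch rather than the project's actual helper:

run_mysql_local() {
  local command=$1 uri=$2 pod=$3 container_name=$4
  # run the client inside the target container itself, e.g. against ProxySQL's admin port
  kubectl exec "$pod" -c "$container_name" -- \
    sh -c "mysql -sN $uri -e \"$command\""   # hypothetical invocation
}
# e.g. the proxyadmin check above:
# run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p...' some-name-proxysql-0 proxysql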
+ '[' '!' -s /tmp/tmp.30P5AqZU1r/select-2.sql ']'
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-2.sql /tmp/tmp.30P5AqZU1r/select-2.sql
+ for user in root xtrabackup monitor proxyadmin operator replication
+ echo 'Checking operator'
Checking operator
++ getSecretData my-cluster-secrets operator
++ local secretName=my-cluster-secrets
++ local dataKey=operator
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.operator}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.FfDhHIwaLK
+++ mktemp
++ local LAST_ERR=/tmp/tmp.ZrtQsfauaq
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.operator}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.FfDhHIwaLK
++ cat /tmp/tmp.ZrtQsfauaq
++ rm /tmp/tmp.FfDhHIwaLK /tmp/tmp.ZrtQsfauaq
++ return 0
+ secret_pass='x&Ruz!fQCER5N~6R9%'
++ getSecretData internal-some-name operator
++ local secretName=internal-some-name
++ local dataKey=operator
++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.pTzdpxLd3k
+++ mktemp
++ local LAST_ERR=/tmp/tmp.X0Fda2Umtn
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/internal-some-name '--template={{.data.operator}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.pTzdpxLd3k
++ cat /tmp/tmp.X0Fda2Umtn
++ rm /tmp/tmp.pTzdpxLd3k /tmp/tmp.X0Fda2Umtn
++ return 0
+ int_secret_pass='x&Ruz!fQCER5N~6R9%'
+ [[ -z x&Ruz!fQCER5N~6R9% ]]
+ [[ x&Ruz!fQCER5N~6R9% != \x\&\R\u\z\!\f\Q\C\E\R\5\N\~\6\R\9\% ]]
+ [[ operator != \p\r\o\x\y\a\d\m\i\n ]]
+ [[ '' =~ operator ]]
+ [[ '' =~ operator ]]
+ echo 'Running compare for operator'
Running compare for operator
+ compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''x&Ruz!fQCER5N~6R9%'\'''
+ local command_id=select-4
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -uoperator -p'\''x&Ruz!fQCER5N~6R9%'\'''
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql ]]
+ expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql
+ run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''x&Ruz!fQCER5N~6R9%'\'''
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -uoperator -p'\''x&Ruz!fQCER5N~6R9%'\'''
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.yIPROZ8sLK
+++ mktemp
++ local LAST_ERR=/tmp/tmp.WedDqKQ7by
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.yIPROZ8sLK
++ cat /tmp/tmp.WedDqKQ7by
++ rm /tmp/tmp.yIPROZ8sLK /tmp/tmp.WedDqKQ7by
++ return 0
+ client_pod=pxc-client-59944c5bbf-wqbt7
+ wait_pod pxc-client-59944c5bbf-wqbt7
+ local pod=pxc-client-59944c5bbf-wqbt7
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-wqbt7
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-wqbt7 condition met waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.30P5AqZU1r/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.30P5AqZU1r/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking replication' Checking replication ++ getSecretData my-cluster-secrets replication ++ local secretName=my-cluster-secrets ++ local dataKey=replication ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ayf5vT5Wfa +++ mktemp ++ local LAST_ERR=/tmp/tmp.6MJ1XSU5j1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ayf5vT5Wfa ++ cat /tmp/tmp.6MJ1XSU5j1 ++ rm /tmp/tmp.Ayf5vT5Wfa /tmp/tmp.6MJ1XSU5j1 ++ return 0 + secret_pass='44uB6k6v&?%]lOUd7k' ++ getSecretData internal-some-name replication ++ local secretName=internal-some-name ++ local dataKey=replication ++ kubectl_bin get secrets/internal-some-name '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.FD8eoPbu6s +++ mktemp ++ local LAST_ERR=/tmp/tmp.FXNEedcafc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FD8eoPbu6s ++ cat /tmp/tmp.FXNEedcafc ++ rm /tmp/tmp.FD8eoPbu6s /tmp/tmp.FXNEedcafc ++ return 0 + int_secret_pass='44uB6k6v&?%]lOUd7k' + [[ -z 44uB6k6v&?%]lOUd7k ]] + [[ 44uB6k6v&?%]lOUd7k != \4\4\u\B\6\k\6\v\&\?\%\]\l\O\U\d\7\k ]] + [[ replication != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ replication ]] + [[ '' =~ replication ]] + echo 'Running compare for replication' Running compare for replication + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''44uB6k6v&?%]lOUd7k'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''44uB6k6v&?%]lOUd7k'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''44uB6k6v&?%]lOUd7k'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''44uB6k6v&?%]lOUd7k'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.s7vxDfhXiE +++ mktemp ++ local LAST_ERR=/tmp/tmp.eZBGUXzSPl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.s7vxDfhXiE ++ cat 
/tmp/tmp.eZBGUXzSPl ++ rm /tmp/tmp.s7vxDfhXiE /tmp/tmp.eZBGUXzSPl ++ return 0 + client_pod=pxc-client-59944c5bbf-wqbt7 + wait_pod pxc-client-59944c5bbf-wqbt7 + local pod=pxc-client-59944c5bbf-wqbt7 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-wqbt7 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-wqbt7 condition met waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.30P5AqZU1r/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.30P5AqZU1r/select-4.sql + [[ -n '' ]] + [[ -n '' ]] + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.WKjoqDKNG1 ++ mktemp + local LAST_ERR=/tmp/tmp.Wyfm6zF05j + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.WKjoqDKNG1 secret/my-cluster-secrets patched + cat /tmp/tmp.Wyfm6zF05j + rm /tmp/tmp.WKjoqDKNG1 /tmp/tmp.Wyfm6zF05j + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WU6AEEpi2I +++ mktemp ++ local LAST_ERR=/tmp/tmp.FwH5W7BVp7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WU6AEEpi2I ++ cat /tmp/tmp.FwH5W7BVp7 ++ rm /tmp/tmp.WU6AEEpi2I /tmp/tmp.FwH5W7BVp7 ++ return 0 + client_pod=pxc-client-59944c5bbf-wqbt7 + wait_pod pxc-client-59944c5bbf-wqbt7 + local pod=pxc-client-59944c5bbf-wqbt7 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-wqbt7 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-wqbt7 condition met waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container 
"pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.30P5AqZU1r/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.30P5AqZU1r/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.XqObtWn58N ++ mktemp + local LAST_ERR=/tmp/tmp.4dOjYcYvsD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XqObtWn58N perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.4dOjYcYvsD + rm /tmp/tmp.XqObtWn58N /tmp/tmp.4dOjYcYvsD + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qsJyJ7n3KK +++ mktemp ++ local LAST_ERR=/tmp/tmp.FqWQDIijlb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qsJyJ7n3KK ++ cat /tmp/tmp.FqWQDIijlb ++ rm /tmp/tmp.qsJyJ7n3KK /tmp/tmp.FqWQDIijlb ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IF2xuFtqWo +++ mktemp ++ local LAST_ERR=/tmp/tmp.KTmRUUlJQv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IF2xuFtqWo ++ cat /tmp/tmp.KTmRUUlJQv ++ rm /tmp/tmp.IF2xuFtqWo /tmp/tmp.KTmRUUlJQv ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.5Pllwnjyvd ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Jh1MMUscta +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.5Pllwnjyvd +++++ cat /tmp/tmp.Jh1MMUscta +++++ rm /tmp/tmp.5Pllwnjyvd /tmp/tmp.Jh1MMUscta +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.UIUu4ISqdm ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.v5otTMaBr6 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e 
+++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.UIUu4ISqdm +++++ cat /tmp/tmp.v5otTMaBr6 +++++ rm /tmp/tmp.UIUu4ISqdm /tmp/tmp.v5otTMaBr6 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Jvb8nnO6Oa +++ mktemp ++ local LAST_ERR=/tmp/tmp.G21qrtkuQj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Jvb8nnO6Oa ++ cat /tmp/tmp.G21qrtkuQj ++ rm /tmp/tmp.Jvb8nnO6Oa /tmp/tmp.G21qrtkuQj ++ return 0 + [[ 3 == \3 ]] + echo + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Ij1WwRzYo6 ++ mktemp + local LAST_ERR=/tmp/tmp.sINDEQVTiu + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Ij1WwRzYo6 secret/my-cluster-secrets patched + cat /tmp/tmp.sINDEQVTiu + rm /tmp/tmp.Ij1WwRzYo6 /tmp/tmp.sINDEQVTiu + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HlOOdvBCAM +++ mktemp ++ local LAST_ERR=/tmp/tmp.TGOQnC5kPa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HlOOdvBCAM ++ cat /tmp/tmp.TGOQnC5kPa ++ rm /tmp/tmp.HlOOdvBCAM /tmp/tmp.TGOQnC5kPa ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0eIjUuplgo +++ mktemp ++ local LAST_ERR=/tmp/tmp.HrlWJiY0Jl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0eIjUuplgo ++ cat /tmp/tmp.HrlWJiY0Jl ++ rm /tmp/tmp.0eIjUuplgo /tmp/tmp.HrlWJiY0Jl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
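patch_secret, used above to rotate the proxyadmin credential, is the same one-liner every user test in this log relies on: merge a base64-encoded value into the Secret's data map (dGVzdC1wYXNzd29yZA== decodes to test-password). Sketches of it and of the getSecretData read-back used in the earlier per-user checks; the helper names come from the trace, the bodies are reconstructions:

    patch_secret_sketch() {
        local secret=$1 key=$2 value=$3   # value arrives already base64-encoded, as in the trace
        kubectl patch secret "$secret" -p="{\"data\":{\"$key\": \"$value\"}}"
    }

    getSecretData_sketch() {
        local secretName=$1 dataKey=$2
        # Extract one key from the Secret and decode it; the per-user checks run
        # this twice and compare my-cluster-secrets against the operator-managed
        # internal-some-name copy to confirm the two stayed in sync.
        kubectl get "secrets/$secretName" "--template={{.data.$dataKey}}" | base64 --decode
    }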
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ejbY1rIH5S +++ mktemp ++ local LAST_ERR=/tmp/tmp.1FH2yJvwBs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ejbY1rIH5S ++ cat /tmp/tmp.1FH2yJvwBs ++ rm /tmp/tmp.ejbY1rIH5S /tmp/tmp.1FH2yJvwBs ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.v4PhnerSHT +++ mktemp ++ local LAST_ERR=/tmp/tmp.V9mdjyEnTX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.v4PhnerSHT ++ cat /tmp/tmp.V9mdjyEnTX ++ rm /tmp/tmp.v4PhnerSHT /tmp/tmp.V9mdjyEnTX ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.iiWAFxB1vF ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.g7ergYoJIo +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.iiWAFxB1vF +++++ cat /tmp/tmp.g7ergYoJIo +++++ rm /tmp/tmp.iiWAFxB1vF /tmp/tmp.g7ergYoJIo +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.6Xz8fKspp1 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.QObHn5ACgH +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.6Xz8fKspp1 +++++ cat /tmp/tmp.QObHn5ACgH +++++ rm /tmp/tmp.6Xz8fKspp1 /tmp/tmp.QObHn5ACgH +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2wIbBR0zDF +++ mktemp ++ local LAST_ERR=/tmp/tmp.0qSbE5u7Pi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2wIbBR0zDF ++ cat /tmp/tmp.0qSbE5u7Pi ++ rm /tmp/tmp.2wIbBR0zDF /tmp/tmp.0qSbE5u7Pi ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] 
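The loop that just completed above is wait_cluster_consistency: a bounded poll that first waits for .status.state to report ready, then confirms the pxc and proxysql ready counts match the expected sizes (it re-resolves the proxy engine each pass to know which status field to read). A condensed sketch under those assumptions:

    wait_cluster_consistency_sketch() {
        local cluster=$1 cluster_size=$2 proxy_size=$3
        local i=0 max=300
        sleep 7
        while true; do
            if [[ $(kubectl get pxc "$cluster" -o 'jsonpath={.status.state}') == "ready" ]] \
                && [[ $(kubectl get pxc "$cluster" -o 'jsonpath={.status.pxc.ready}') == "$cluster_size" ]] \
                && [[ $(kubectl get pxc "$cluster" -o 'jsonpath={.status.proxysql.ready}') == "$proxy_size" ]]; then
                return 0
            fi
            # 300 attempts, 5 seconds apart: the wait is bounded at roughly 25 minutes.
            if (( i >= max )); then
                echo "pxc/$cluster never became consistent" >&2
                return 1
            fi
            echo -n .
            sleep 5
            i=$((i+1))
        done
    }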
+ run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.30P5AqZU1r/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-2.sql /tmp/tmp.30P5AqZU1r/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.30P5AqZU1r/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-2.sql /tmp/tmp.30P5AqZU1r/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.30P5AqZU1r/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-2.sql /tmp/tmp.30P5AqZU1r/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.J9PS8n4Y5U ++ mktemp + local LAST_ERR=/tmp/tmp.ydBWc5tf1l + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.J9PS8n4Y5U perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.ydBWc5tf1l + rm /tmp/tmp.J9PS8n4Y5U /tmp/tmp.ydBWc5tf1l + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.rkwlBWQLZS ++ mktemp + local LAST_ERR=/tmp/tmp.LXtFXU2O4S + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rkwlBWQLZS secret/my-cluster-secrets patched + cat /tmp/tmp.LXtFXU2O4S + rm /tmp/tmp.rkwlBWQLZS /tmp/tmp.LXtFXU2O4S + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7wOMIEf7qC +++ mktemp ++ local LAST_ERR=/tmp/tmp.aXefQSRkkL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7wOMIEf7qC ++ cat /tmp/tmp.aXefQSRkkL ++ rm /tmp/tmp.7wOMIEf7qC /tmp/tmp.aXefQSRkkL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Bt0L6uwotM +++ mktemp ++ local LAST_ERR=/tmp/tmp.57wKj95Tdd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Bt0L6uwotM ++ cat /tmp/tmp.57wKj95Tdd ++ rm /tmp/tmp.Bt0L6uwotM /tmp/tmp.57wKj95Tdd ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
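Every select-N comparison in this test follows the same golden-file pattern: run the query, insist the captured output is non-empty, prefer a version-specific expected file when one exists (select-4-80.sql was chosen earlier because the image tag matches 8.0), then diff. A sketch, where $test_dir, $tmp_dir and $IMAGE_PXC are stand-in names for the suite's own variables:

    compare_mysql_cmd_sketch() {
        local id=$1 command=$2 uri=$3
        local expected=$test_dir/compare/$id.sql
        # Prefer the 8.0-specific golden file when testing an 8.0 image.
        if [[ $IMAGE_PXC =~ 8\.0 && -f ${expected%.sql}-80.sql ]]; then
            expected=${expected%.sql}-80.sql
        fi
        run_mysql "$command" "$uri" > "$tmp_dir/$id.sql"
        [[ -s $tmp_dir/$id.sql ]] || return 1   # an empty capture means the query itself failed
        diff -u "$expected" "$tmp_dir/$id.sql"  # any mismatch fails the test
    }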
.+ sleep 5 [... iterations 1 through 18 of the identical 5-second status poll condensed; kubectl get pxc some-name -o 'jsonpath={.status.state}' kept returning "initializing" ...] + echo -n . 
.+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IGFpwsT1YC +++ mktemp ++ local LAST_ERR=/tmp/tmp.C6AOYx1cWs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IGFpwsT1YC ++ cat /tmp/tmp.C6AOYx1cWs ++ rm /tmp/tmp.IGFpwsT1YC /tmp/tmp.C6AOYx1cWs ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.B75lm6Mg4w +++ mktemp ++ local LAST_ERR=/tmp/tmp.0sTe8GW8Bm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.B75lm6Mg4w ++ cat /tmp/tmp.0sTe8GW8Bm ++ rm /tmp/tmp.B75lm6Mg4w /tmp/tmp.0sTe8GW8Bm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rlgefBlIp6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.nOVZDFtUpM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rlgefBlIp6 ++ cat /tmp/tmp.nOVZDFtUpM ++ rm /tmp/tmp.rlgefBlIp6 /tmp/tmp.nOVZDFtUpM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 22 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MIsINRiWYV +++ mktemp ++ local LAST_ERR=/tmp/tmp.wSWAMsUKsr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MIsINRiWYV ++ cat /tmp/tmp.wSWAMsUKsr ++ rm /tmp/tmp.MIsINRiWYV /tmp/tmp.wSWAMsUKsr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 23 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.a6A3pUikln +++ mktemp ++ local LAST_ERR=/tmp/tmp.OdYKR4SPXi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.a6A3pUikln ++ cat /tmp/tmp.OdYKR4SPXi ++ rm /tmp/tmp.a6A3pUikln /tmp/tmp.OdYKR4SPXi ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
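The xtrabackup check just below (select-3), like the proxyadmin checks before it, uses run_mysql_local: the client runs inside the target pod itself rather than through the shared pxc-client pod, so per-replica credentials can be exercised (ProxySQL's admin interface on port 6032, and the xtrabackup user, which the test reaches only via 127.0.0.1 inside the pxc container). A sketch of the mechanism, assuming kubectl exec is the transport:

    run_mysql_local_sketch() {
        local command=$1 uri=$2 pod=$3 container=$4
        # Execute the mysql client inside the pod; -sN keeps the output
        # diff-friendly (no table borders, no header row).
        kubectl exec "$pod" ${container:+-c "$container"} -- \
            bash -c "mysql -sN $uri -e '$command'"
    }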
.+ sleep 5 + [[ 24 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.v8dz7elAfj +++ mktemp ++ local LAST_ERR=/tmp/tmp.bgvzJxrhXR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.v8dz7elAfj ++ cat /tmp/tmp.bgvzJxrhXR ++ rm /tmp/tmp.v8dz7elAfj /tmp/tmp.bgvzJxrhXR ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Y3qW5JUKLa +++ mktemp ++ local LAST_ERR=/tmp/tmp.pzO3CAFVaM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Y3qW5JUKLa ++ cat /tmp/tmp.pzO3CAFVaM ++ rm /tmp/tmp.Y3qW5JUKLa /tmp/tmp.pzO3CAFVaM ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.QxzNkBXY2U ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.L4lTZsMTIz +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.QxzNkBXY2U +++++ cat /tmp/tmp.L4lTZsMTIz +++++ rm /tmp/tmp.QxzNkBXY2U /tmp/tmp.L4lTZsMTIz +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.5mubp4WitX ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.B0PyqNiQRJ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.5mubp4WitX +++++ cat /tmp/tmp.B0PyqNiQRJ +++++ rm /tmp/tmp.5mubp4WitX /tmp/tmp.B0PyqNiQRJ +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LsZBIxRh1G +++ mktemp ++ local LAST_ERR=/tmp/tmp.lPKGm0owT8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LsZBIxRh1G ++ cat /tmp/tmp.lPKGm0owT8 ++ rm /tmp/tmp.LsZBIxRh1G /tmp/tmp.lPKGm0owT8 ++ return 0 + [[ 2 == \2 ]] + echo + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-3-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SHOW 
DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' -s /tmp/tmp.30P5AqZU1r/select-3.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-3.sql /tmp/tmp.30P5AqZU1r/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.lMYU6zqg6e ++ mktemp + local LAST_ERR=/tmp/tmp.rdNxd4LMpr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lMYU6zqg6e secret/my-cluster-secrets patched + cat /tmp/tmp.rdNxd4LMpr + rm /tmp/tmp.lMYU6zqg6e /tmp/tmp.rdNxd4LMpr + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.267PaDPQzI +++ mktemp ++ local LAST_ERR=/tmp/tmp.kslzTMCuFP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.267PaDPQzI ++ cat /tmp/tmp.kslzTMCuFP ++ rm /tmp/tmp.267PaDPQzI /tmp/tmp.kslzTMCuFP ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + retry=0 + is_password_updated monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep additional_password + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sbcSOYGQ8i +++ mktemp ++ local LAST_ERR=/tmp/tmp.0ECnmdk1DS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sbcSOYGQ8i ++ cat /tmp/tmp.0ECnmdk1DS ++ rm /tmp/tmp.sbcSOYGQ8i /tmp/tmp.0ECnmdk1DS ++ return 0 + client_pod=pxc-client-59944c5bbf-wqbt7 + wait_pod pxc-client-59944c5bbf-wqbt7 + local pod=pxc-client-59944c5bbf-wqbt7 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-wqbt7 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-wqbt7 
condition met waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace {"additional_password": "*C1F414D9BAF378B656A849B31F9F8AF3125F558B"} + retry=0 + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kMqXMRL8fX +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZTHjIrIQsZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kMqXMRL8fX ++ cat /tmp/tmp.ZTHjIrIQsZ ++ rm /tmp/tmp.kMqXMRL8fX /tmp/tmp.ZTHjIrIQsZ ++ return 0 + client_pod=pxc-client-59944c5bbf-wqbt7 + wait_pod pxc-client-59944c5bbf-wqbt7 + local pod=pxc-client-59944c5bbf-wqbt7 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-wqbt7 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-wqbt7 condition met waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 1 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9s7yNeD1VU +++ mktemp ++ local LAST_ERR=/tmp/tmp.Jk1RorWta9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9s7yNeD1VU ++ cat /tmp/tmp.Jk1RorWta9 ++ rm /tmp/tmp.9s7yNeD1VU /tmp/tmp.Jk1RorWta9 ++ return 0 + client_pod=pxc-client-59944c5bbf-wqbt7 + wait_pod pxc-client-59944c5bbf-wqbt7 + local pod=pxc-client-59944c5bbf-wqbt7 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-wqbt7 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-wqbt7 condition met waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 2 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 
'uri=-h some-name-pxc -uroot -p'\''test-password'\''' [... retries 2 through 10 of is_old_password_discarded condensed; each run of SELECT User_attributes FROM mysql.user WHERE user='monitor' still showed "additional_password", so grep NULL matched nothing and the loop slept 1s ...] + set +o xtrace + echo 
'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 11 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lIAR39Y47k +++ mktemp ++ local LAST_ERR=/tmp/tmp.da2AXky33x ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lIAR39Y47k ++ cat /tmp/tmp.da2AXky33x ++ rm /tmp/tmp.lIAR39Y47k /tmp/tmp.da2AXky33x ++ return 0 + client_pod=pxc-client-59944c5bbf-wqbt7 + wait_pod pxc-client-59944c5bbf-wqbt7 + local pod=pxc-client-59944c5bbf-wqbt7 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-wqbt7 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-wqbt7 condition met waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 12 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.scy4KzCGdc +++ mktemp ++ local LAST_ERR=/tmp/tmp.hHn22p3J3G ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.scy4KzCGdc ++ cat /tmp/tmp.hHn22p3J3G ++ rm /tmp/tmp.scy4KzCGdc /tmp/tmp.hHn22p3J3G ++ return 0 + client_pod=pxc-client-59944c5bbf-wqbt7 + wait_pod pxc-client-59944c5bbf-wqbt7 + local pod=pxc-client-59944c5bbf-wqbt7 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-wqbt7 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-wqbt7 condition met waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 13 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE 
user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZoRsvab1xH +++ mktemp ++ local LAST_ERR=/tmp/tmp.TfZ5RPF5K8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZoRsvab1xH ++ cat /tmp/tmp.TfZ5RPF5K8 ++ rm /tmp/tmp.ZoRsvab1xH /tmp/tmp.TfZ5RPF5K8 ++ return 0 + client_pod=pxc-client-59944c5bbf-wqbt7 + wait_pod pxc-client-59944c5bbf-wqbt7 + local pod=pxc-client-59944c5bbf-wqbt7 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-wqbt7 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-wqbt7 condition met waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 14 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dzoTQtOPyb +++ mktemp ++ local LAST_ERR=/tmp/tmp.NxsIlYDTxV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dzoTQtOPyb ++ cat /tmp/tmp.NxsIlYDTxV ++ rm /tmp/tmp.dzoTQtOPyb /tmp/tmp.NxsIlYDTxV ++ return 0 + client_pod=pxc-client-59944c5bbf-wqbt7 + wait_pod pxc-client-59944c5bbf-wqbt7 + local pod=pxc-client-59944c5bbf-wqbt7 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-wqbt7 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-wqbt7 condition met waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 15 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ 
local LAST_OUT=/tmp/tmp.LjUqvmIg26 +++ mktemp ++ local LAST_ERR=/tmp/tmp.hcA6XSSZki ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LjUqvmIg26 ++ cat /tmp/tmp.hcA6XSSZki ++ rm /tmp/tmp.LjUqvmIg26 /tmp/tmp.hcA6XSSZki ++ return 0 + client_pod=pxc-client-59944c5bbf-wqbt7 + wait_pod pxc-client-59944c5bbf-wqbt7 + local pod=pxc-client-59944c5bbf-wqbt7 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-wqbt7 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-wqbt7 condition met waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 16 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.82kVG1y9VB +++ mktemp ++ local LAST_ERR=/tmp/tmp.eaCZeoYsXX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.82kVG1y9VB ++ cat /tmp/tmp.eaCZeoYsXX ++ rm /tmp/tmp.82kVG1y9VB /tmp/tmp.eaCZeoYsXX ++ return 0 + client_pod=pxc-client-59944c5bbf-wqbt7 + wait_pod pxc-client-59944c5bbf-wqbt7 + local pod=pxc-client-59944c5bbf-wqbt7 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-wqbt7 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-wqbt7 condition met waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace NULL + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZqU09D9RZ3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.7gvFhPo7EL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZqU09D9RZ3 ++ cat /tmp/tmp.7gvFhPo7EL ++ rm /tmp/tmp.ZqU09D9RZ3 /tmp/tmp.7gvFhPo7EL ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 
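The poll above is the suite's password-propagation check: once per second it re-runs a SELECT against mysql.user through the client pod and stops as soon as grep finds NULL in User_attributes, i.e. as soon as the server has discarded the old password it had retained alongside the new one. A minimal sketch of that loop, reconstructed from this trace (run_mysql is the suite's own helper; the real functions carry extra temp-file plumbing and logging):

is_old_password_discarded() {
    local username=$1
    local uri=$2
    # User_attributes goes back to NULL once the retained old password
    # has been dropped, so grep's exit code is the whole check
    run_mysql "SELECT User_attributes FROM mysql.user WHERE user='${username}'" "${uri}" | grep NULL
}

retry=0
until is_old_password_discarded monitor "-h some-name-pxc -uroot -p'test-password'"; do
    echo 'waiting for password propagation'
    sleep 1
    let retry+=1
    # the trace bounds the wait at 240 one-second polls, roughly 4 minutes
    if [[ ${retry} -ge 240 ]]; then
        echo 'old password was never discarded'
        exit 1
    fi
done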
+ wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZqU09D9RZ3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.7gvFhPo7EL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZqU09D9RZ3 ++ cat /tmp/tmp.7gvFhPo7EL ++ rm /tmp/tmp.ZqU09D9RZ3 /tmp/tmp.7gvFhPo7EL ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mjaTJsIFND +++ mktemp ++ local LAST_ERR=/tmp/tmp.i8Z4IZuHL8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mjaTJsIFND ++ cat /tmp/tmp.i8Z4IZuHL8 ++ rm /tmp/tmp.mjaTJsIFND /tmp/tmp.i8Z4IZuHL8 ++ return 0 + [[ 3 == \3 ]]
+++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.GPAOMcYpkR ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.7XuucGtbZX +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.GPAOMcYpkR +++++ cat /tmp/tmp.7XuucGtbZX +++++ rm /tmp/tmp.GPAOMcYpkR /tmp/tmp.7XuucGtbZX +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.2pKS4unYq0 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.s3tHG6Z8Ri +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.2pKS4unYq0 +++++ cat /tmp/tmp.s3tHG6Z8Ri +++++ rm /tmp/tmp.2pKS4unYq0 /tmp/tmp.s3tHG6Z8Ri +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bRSOR6S4MG +++ mktemp ++ local LAST_ERR=/tmp/tmp.SQcNkwE6CE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bRSOR6S4MG ++ cat /tmp/tmp.SQcNkwE6CE ++ rm /tmp/tmp.bRSOR6S4MG /tmp/tmp.SQcNkwE6CE ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10
+ compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TUGHMKARlO ++ cat /tmp/tmp.6RtRKOwHZx ++ rm /tmp/tmp.TUGHMKARlO /tmp/tmp.6RtRKOwHZx ++ return 0 + client_pod=pxc-client-59944c5bbf-wqbt7 + wait_pod pxc-client-59944c5bbf-wqbt7 + local pod=pxc-client-59944c5bbf-wqbt7 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-wqbt7 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-wqbt7 condition met waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.30P5AqZU1r/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.30P5AqZU1r/select-4.sql
+ desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.i1IPGpFD1J ++ mktemp + local LAST_ERR=/tmp/tmp.lWFm8Zug5R + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.i1IPGpFD1J secret/my-cluster-secrets patched + cat /tmp/tmp.lWFm8Zug5R + rm /tmp/tmp.i1IPGpFD1J /tmp/tmp.lWFm8Zug5R + return 0 + sleep 15
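patch_secret, traced above, is a thin wrapper over kubectl patch: it swaps a single key in an existing Secret, and the caller passes the value already base64-encoded because that is how Secret .data fields are stored (dGVzdC1wYXNzd29yZA== decodes to test-password). A sketch matching the call in the trace:

# replace one key of an existing Secret; value must be base64 already
patch_secret() {
    local secret=$1
    local key=$2
    local value=$3
    kubectl patch secret "${secret}" -p="{\"data\":{\"${key}\": \"${value}\"}}"
}

# as used here: rotate the operator system user's password
patch_secret my-cluster-secrets operator "$(echo -n test-password | base64)"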
+ wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DwfEV7K53h +++ mktemp ++ local LAST_ERR=/tmp/tmp.91yVm4oFvV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DwfEV7K53h ++ cat /tmp/tmp.91yVm4oFvV ++ rm /tmp/tmp.DwfEV7K53h /tmp/tmp.91yVm4oFvV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Byt9bWZ086 +++ mktemp ++ local LAST_ERR=/tmp/tmp.eNNaEULTAS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Byt9bWZ086 ++ cat /tmp/tmp.eNNaEULTAS ++ rm /tmp/tmp.Byt9bWZ086 /tmp/tmp.eNNaEULTAS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JLCCUvKZ8l +++ mktemp ++ local LAST_ERR=/tmp/tmp.i4Ts8yUAcU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JLCCUvKZ8l ++ cat /tmp/tmp.i4Ts8yUAcU ++ rm /tmp/tmp.JLCCUvKZ8l /tmp/tmp.i4Ts8yUAcU ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Wqe5QOocuW +++ mktemp ++ local LAST_ERR=/tmp/tmp.csxD5iAdnl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Wqe5QOocuW ++ cat /tmp/tmp.csxD5iAdnl ++ rm /tmp/tmp.Wqe5QOocuW /tmp/tmp.csxD5iAdnl ++ return 0 + [[ 3 == \3 ]]
+++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.a92mTtPtvG ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Cpv1lNIRlJ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.a92mTtPtvG +++++ cat /tmp/tmp.Cpv1lNIRlJ +++++ rm /tmp/tmp.a92mTtPtvG /tmp/tmp.Cpv1lNIRlJ +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.7w3wyiu2Ea ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Av1uWWzJ4J +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.7w3wyiu2Ea +++++ cat /tmp/tmp.Av1uWWzJ4J +++++ rm /tmp/tmp.7w3wyiu2Ea /tmp/tmp.Av1uWWzJ4J +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XknW603qc6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.aK4RVaR2zr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XknW603qc6 ++ cat /tmp/tmp.aK4RVaR2zr ++ rm /tmp/tmp.XknW603qc6 /tmp/tmp.aK4RVaR2zr ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10
+ compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql ]] +
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KkUr2F11Ri +++ mktemp ++ local LAST_ERR=/tmp/tmp.oOJU38J0gd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KkUr2F11Ri ++ cat /tmp/tmp.oOJU38J0gd ++ rm /tmp/tmp.KkUr2F11Ri /tmp/tmp.oOJU38J0gd ++ return 0 + client_pod=pxc-client-59944c5bbf-wqbt7 + wait_pod pxc-client-59944c5bbf-wqbt7 + local pod=pxc-client-59944c5bbf-wqbt7 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-wqbt7 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-wqbt7 condition met waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.30P5AqZU1r/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.30P5AqZU1r/select-4.sql
+ desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.kEMNanySw6 ++ mktemp + local LAST_ERR=/tmp/tmp.LRDRKnP1I8 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.kEMNanySw6 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.LRDRKnP1I8 + rm /tmp/tmp.kEMNanySw6 /tmp/tmp.LRDRKnP1I8 + return 0 + sleep 30
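Switching the cluster to a different credentials Secret is a single-field merge patch against the PerconaXtraDBCluster resource; the operator notices the new spec.secretsName and re-syncs every system user from the new Secret, which is why the status below falls back to initializing for close to two minutes before returning to ready. The standalone equivalent of the patch in the trace:

# repoint the CR at another credentials Secret; the operator then
# rolls the cluster while it re-applies all system user passwords
kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}'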
+ wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kW0EcPWLyY +++ mktemp ++ local LAST_ERR=/tmp/tmp.SSMqF7fbeV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kW0EcPWLyY ++ cat /tmp/tmp.SSMqF7fbeV ++ rm /tmp/tmp.kW0EcPWLyY /tmp/tmp.SSMqF7fbeV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X0yZBhE4zT +++ mktemp ++ local LAST_ERR=/tmp/tmp.Pwnc8ZSuay ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.X0yZBhE4zT ++ cat /tmp/tmp.Pwnc8ZSuay ++ rm /tmp/tmp.X0yZBhE4zT /tmp/tmp.Pwnc8ZSuay ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n .
[log condensed: the 5-second status poll repeats for i=1 through i=20 with .status.state still initializing while the operator rolls the cluster onto my-cluster-secrets-2; only the loop counter and the /tmp/tmp.* names differ between iterations]
.+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1nuObZ4UoC +++ mktemp ++ local LAST_ERR=/tmp/tmp.nbSro35UlD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1nuObZ4UoC ++ cat /tmp/tmp.nbSro35UlD ++ rm /tmp/tmp.1nuObZ4UoC /tmp/tmp.nbSro35UlD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n .
.+ sleep 5 + [[ 22 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QJzJYDq5dt +++ mktemp ++ local LAST_ERR=/tmp/tmp.zY0kKj57HQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QJzJYDq5dt ++ cat /tmp/tmp.zY0kKj57HQ ++ rm /tmp/tmp.QJzJYDq5dt /tmp/tmp.zY0kKj57HQ ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VYfLtiZPJe +++ mktemp ++ local LAST_ERR=/tmp/tmp.AkYKHw0Hbz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VYfLtiZPJe ++ cat /tmp/tmp.AkYKHw0Hbz ++ rm /tmp/tmp.VYfLtiZPJe /tmp/tmp.AkYKHw0Hbz ++ return 0 + [[ 3 == \3 ]]
+++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.lizG2DhV74 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.N8NL0P12zr +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.lizG2DhV74 +++++ cat /tmp/tmp.N8NL0P12zr +++++ rm /tmp/tmp.lizG2DhV74 /tmp/tmp.N8NL0P12zr +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.GbpbLNNfHo ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ooNVwPwmVD +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.GbpbLNNfHo +++++ cat /tmp/tmp.ooNVwPwmVD +++++ rm /tmp/tmp.GbpbLNNfHo /tmp/tmp.ooNVwPwmVD +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KkXb9GQUg6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.3vzjRefM8N ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KkXb9GQUg6 ++ cat /tmp/tmp.3vzjRefM8N ++ rm /tmp/tmp.KkXb9GQUg6 /tmp/tmp.3vzjRefM8N ++ return 0 + [[ 2 == \2 ]] + echo
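The get_proxy_engine block traced above decides which proxy fronts the cluster by probing the CR spec: .spec.haproxy.enabled first, then .spec.proxysql.enabled; here haproxy comes back empty and proxysql true, so readiness is read from .status.proxysql.ready and compared against the expected proxy size. A sketch of that decision as it appears in the trace (the suite's real helpers also derive the proxy service name, e.g. some-name-proxysql):

# answer which proxy the CR enables for a given cluster
get_proxy_engine() {
    local cluster_name=$1
    if [[ $(kubectl get pxc "${cluster_name}" -o 'jsonpath={.spec.haproxy.enabled}') == "true" ]]; then
        echo haproxy
    elif [[ $(kubectl get pxc "${cluster_name}" -o 'jsonpath={.spec.proxysql.enabled}') == "true" ]]; then
        echo proxysql
    fi
}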
+ desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.YDSOLASFkl ++ mktemp + local LAST_ERR=/tmp/tmp.ylXN22JMkl + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.YDSOLASFkl secret/my-cluster-secrets-2 patched + cat /tmp/tmp.ylXN22JMkl + rm /tmp/tmp.YDSOLASFkl /tmp/tmp.ylXN22JMkl + return 0 + sleep 15
+ wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LAyTwEr2ns +++ mktemp ++ local LAST_ERR=/tmp/tmp.DRc3pDo6DX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LAyTwEr2ns ++ cat /tmp/tmp.DRc3pDo6DX ++ rm /tmp/tmp.LAyTwEr2ns /tmp/tmp.DRc3pDo6DX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jA9jofYvcx +++ mktemp ++ local LAST_ERR=/tmp/tmp.0dvLd6Fob5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jA9jofYvcx ++ cat /tmp/tmp.0dvLd6Fob5 ++ rm /tmp/tmp.jA9jofYvcx /tmp/tmp.0dvLd6Fob5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w36iIMyrtj +++ mktemp ++ local LAST_ERR=/tmp/tmp.Z3C2BvlPWX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.w36iIMyrtj ++ cat /tmp/tmp.Z3C2BvlPWX ++ rm /tmp/tmp.w36iIMyrtj /tmp/tmp.Z3C2BvlPWX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n .
.+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nLRmTTgx5U +++ mktemp ++ local LAST_ERR=/tmp/tmp.dJ7LWeD0Aa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nLRmTTgx5U ++ cat /tmp/tmp.dJ7LWeD0Aa ++ rm /tmp/tmp.nLRmTTgx5U /tmp/tmp.dJ7LWeD0Aa ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nPzZjxbTpa +++ mktemp ++ local LAST_ERR=/tmp/tmp.cSO9z6C8fD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nPzZjxbTpa ++ cat /tmp/tmp.cSO9z6C8fD ++ rm /tmp/tmp.nPzZjxbTpa /tmp/tmp.cSO9z6C8fD ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.2EX5unnIld ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.1Ujs9EZccH +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.2EX5unnIld +++++ cat /tmp/tmp.1Ujs9EZccH +++++ rm /tmp/tmp.2EX5unnIld /tmp/tmp.1Ujs9EZccH +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.vp2du65UDm ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.8hNzcnekbg +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.vp2du65UDm +++++ cat /tmp/tmp.8hNzcnekbg +++++ rm /tmp/tmp.vp2du65UDm /tmp/tmp.8hNzcnekbg +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Up4JRYGm8Z +++ mktemp ++ local LAST_ERR=/tmp/tmp.CWE1QTrAGL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Up4JRYGm8Z ++ cat /tmp/tmp.CWE1QTrAGL ++ rm /tmp/tmp.Up4JRYGm8Z /tmp/tmp.CWE1QTrAGL ++ return 0 + [[ 2 == \2 ]] + echo + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql ]] + 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vj6QZUbuWO +++ mktemp ++ local LAST_ERR=/tmp/tmp.3R6cl31rkB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vj6QZUbuWO ++ cat /tmp/tmp.3R6cl31rkB ++ rm /tmp/tmp.vj6QZUbuWO /tmp/tmp.3R6cl31rkB ++ return 0 + client_pod=pxc-client-59944c5bbf-wqbt7 + wait_pod pxc-client-59944c5bbf-wqbt7 + local pod=pxc-client-59944c5bbf-wqbt7 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-wqbt7 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-wqbt7 condition met waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.30P5AqZU1r/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.30P5AqZU1r/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.WGFEkmdZ2Z +++ mktemp ++ local LAST_ERR=/tmp/tmp.BxEhDTXWCS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WGFEkmdZ2Z ++ cat /tmp/tmp.BxEhDTXWCS ++ rm /tmp/tmp.WGFEkmdZ2Z /tmp/tmp.BxEhDTXWCS ++ return 0 + newpass='+L%L3@^x.yItG6=Zz2r' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''+L%L3@^x.yItG6=Zz2r'\'';' '-h some-name-pxc -uroot -p'\''+L%L3@^x.yItG6=Zz2r'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''+L%L3@^x.yItG6=Zz2r'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''+L%L3@^x.yItG6=Zz2r'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YspWMb3cyZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.MWNqNH1KLG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YspWMb3cyZ ++ cat /tmp/tmp.MWNqNH1KLG ++ rm /tmp/tmp.YspWMb3cyZ /tmp/tmp.MWNqNH1KLG ++ return 0 + client_pod=pxc-client-59944c5bbf-wqbt7 + wait_pod pxc-client-59944c5bbf-wqbt7 + local pod=pxc-client-59944c5bbf-wqbt7 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-wqbt7 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace 
pod/pxc-client-59944c5bbf-wqbt7 condition met waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''+L%L3@^x.yItG6=Zz2r'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''+L%L3@^x.yItG6=Zz2r'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''+L%L3@^x.yItG6=Zz2r'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''+L%L3@^x.yItG6=Zz2r'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MuzYxnKtcF +++ mktemp ++ local LAST_ERR=/tmp/tmp.cO7ozR1UqZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MuzYxnKtcF ++ cat /tmp/tmp.cO7ozR1UqZ ++ rm /tmp/tmp.MuzYxnKtcF /tmp/tmp.cO7ozR1UqZ ++ return 0 + client_pod=pxc-client-59944c5bbf-wqbt7 + wait_pod pxc-client-59944c5bbf-wqbt7 + local pod=pxc-client-59944c5bbf-wqbt7 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-wqbt7 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-wqbt7 condition met waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.30P5AqZU1r/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.30P5AqZU1r/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.s7rN1uAiPY +++ mktemp ++ local LAST_ERR=/tmp/tmp.J5MNHj5uk7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.s7rN1uAiPY ++ cat /tmp/tmp.J5MNHj5uk7 ++ rm /tmp/tmp.s7rN1uAiPY /tmp/tmp.J5MNHj5uk7 ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.3GQHTs9Kgx ++ mktemp + local LAST_ERR=/tmp/tmp.0VWMwp7qkO + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3GQHTs9Kgx secret/my-cluster-secrets-2 configured + cat /tmp/tmp.0VWMwp7qkO Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
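getSecretData, used twice above (my-cluster-secrets-2/root and internal-some-name/operator), is the standard way to pull one credential out of a Secret: read the key with a go-template and pipe it through base64 --decode. internal-some-name is the operator's internal copy of the cluster credentials, which is why its operator key still decodes to test-password2 here. The pattern as a standalone sketch:

# fetch and decode a single key from a Secret
getSecretData() {
    local secretName=$1
    local dataKey=$2
    kubectl get "secrets/${secretName}" "--template={{.data.${dataKey}}}" | base64 --decode
}

# e.g. the auto-generated root password of my-cluster-secrets-2
pass=$(getSecretData my-cluster-secrets-2 root)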
+ rm /tmp/tmp.3GQHTs9Kgx /tmp/tmp.0VWMwp7qkO + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lXGV7FgTCn +++ mktemp ++ local LAST_ERR=/tmp/tmp.6QgRLlkf9D ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lXGV7FgTCn ++ cat /tmp/tmp.6QgRLlkf9D ++ rm /tmp/tmp.lXGV7FgTCn /tmp/tmp.6QgRLlkf9D ++ return 0 + client_pod=pxc-client-59944c5bbf-wqbt7 + wait_pod pxc-client-59944c5bbf-wqbt7 + local pod=pxc-client-59944c5bbf-wqbt7 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-wqbt7 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-wqbt7 condition met waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.30P5AqZU1r/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.30P5AqZU1r/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/conf/some-name.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.gEXQGMabS6 + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2124-f48cc30b#' + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-28990~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + local LAST_ERR=/tmp/tmp.e2BdV41lkv + local exit_status=0 ++ seq 0 2 + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.gEXQGMabS6 perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.e2BdV41lkv + rm /tmp/tmp.gEXQGMabS6 /tmp/tmp.e2BdV41lkv + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PLgmTD7wbh +++ mktemp ++ local LAST_ERR=/tmp/tmp.1msf7W4un8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PLgmTD7wbh ++ cat /tmp/tmp.1msf7W4un8 ++ rm /tmp/tmp.PLgmTD7wbh /tmp/tmp.1msf7W4un8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
[.. wait iterations with counter 0 through 57 elided: each repeats the same scaffolding — sleep 5, compare the counter against the 300-iteration cap, re-run kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' through fresh mktemp files, read back "initializing", and print another dot — until iteration 58 below finally observes "ready" ..]
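(Editor's note: before the loop finally sees "ready" in the next block, here is the shape of wait_cluster_consistency as reconstructed from this trace. The locals, the 300-iteration cap, and the follow-up replica-count checks are all visible above and below; the loop ordering and the timeout branch are approximations, since this excerpt never times out, and the real helper first resolves whether haproxy or proxysql is the active proxy before reading its ready count:

wait_cluster_consistency() {
    local cluster_name=$1 cluster_size=$2 proxy_size=$3
    local i=0 max=300
    sleep 7
    echo -n "waiting for pxc/${cluster_name} to be ready"
    until [[ $(kubectl_bin get pxc "$cluster_name" -o 'jsonpath={.status.state}') == "ready" ]]; do
        echo -n .
        sleep 5
        if [[ $i -ge $max ]]; then
            echo " timeout: pxc/${cluster_name} never reached ready"   # assumption
            return 1
        fi
        let i+=1
    done
    # state is "ready"; confirm the expected number of ready PXC and proxy pods
    [[ $(kubectl_bin get pxc "$cluster_name" -o 'jsonpath={.status.pxc.ready}') == "$cluster_size" ]]
    [[ $(kubectl_bin get pxc "$cluster_name" -o 'jsonpath={.status.haproxy.ready}') == "$proxy_size" ]]
}

Under set -e, a mismatch in either final [[ ]] test aborts the run, which is how the harness turns a wrong replica count into a test failure.)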
.+ sleep 5 + [[ 58 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8JLk078ATL +++ mktemp ++ local LAST_ERR=/tmp/tmp.CTB8aHYsHq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8JLk078ATL ++ cat /tmp/tmp.CTB8aHYsHq ++ rm /tmp/tmp.8JLk078ATL /tmp/tmp.CTB8aHYsHq ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mG0rzWa9bj +++ mktemp ++ local LAST_ERR=/tmp/tmp.vmmmgX1Er0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mG0rzWa9bj ++ cat /tmp/tmp.vmmmgX1Er0 ++ rm /tmp/tmp.mG0rzWa9bj /tmp/tmp.vmmmgX1Er0 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.1TzOlDtU0C ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.1D6iv9xoIU +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.1TzOlDtU0C +++++ cat /tmp/tmp.1D6iv9xoIU +++++ rm /tmp/tmp.1TzOlDtU0C /tmp/tmp.1D6iv9xoIU +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ui6JpRdnmF +++ mktemp ++ local LAST_ERR=/tmp/tmp.35hOmNKbDV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ui6JpRdnmF ++ cat /tmp/tmp.35hOmNKbDV ++ rm /tmp/tmp.Ui6JpRdnmF /tmp/tmp.35hOmNKbDV ++ return 0 + [[ 3 == \3 ]] + echo + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mH0VHbyCMm +++ mktemp ++ local LAST_ERR=/tmp/tmp.w4NoLvmtE4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mH0VHbyCMm ++ cat /tmp/tmp.w4NoLvmtE4 ++ rm /tmp/tmp.mH0VHbyCMm /tmp/tmp.w4NoLvmtE4 ++ return 0 + current_generation=1 + [[ 1 != \1 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ej117FK9Wv ++ mktemp + local LAST_ERR=/tmp/tmp.pAHzJNNdXi + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 
+ set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ej117FK9Wv secret/my-cluster-secrets patched + cat /tmp/tmp.pAHzJNNdXi + rm /tmp/tmp.ej117FK9Wv /tmp/tmp.pAHzJNNdXi + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WcOd6FlY9S +++ mktemp ++ local LAST_ERR=/tmp/tmp.oZVF2YMGYk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WcOd6FlY9S ++ cat /tmp/tmp.oZVF2YMGYk ++ rm /tmp/tmp.WcOd6FlY9S /tmp/tmp.oZVF2YMGYk ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kM9KqSr8kO +++ mktemp ++ local LAST_ERR=/tmp/tmp.ubuaWWZvCW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kM9KqSr8kO ++ cat /tmp/tmp.ubuaWWZvCW ++ rm /tmp/tmp.kM9KqSr8kO /tmp/tmp.ubuaWWZvCW ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.CXX4gosHVP ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.tYrEBBlUNM +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.CXX4gosHVP +++++ cat /tmp/tmp.tYrEBBlUNM +++++ rm /tmp/tmp.CXX4gosHVP /tmp/tmp.tYrEBBlUNM +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MYci3b02IK +++ mktemp ++ local LAST_ERR=/tmp/tmp.bhNnu5xfF8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MYci3b02IK ++ cat /tmp/tmp.bhNnu5xfF8 ++ rm /tmp/tmp.MYci3b02IK /tmp/tmp.bhNnu5xfF8 ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-3-80.sql ]] + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CCkXU9QaFt +++ mktemp ++ local LAST_ERR=/tmp/tmp.h4UkMcu2U7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CCkXU9QaFt ++ cat /tmp/tmp.h4UkMcu2U7 ++ rm /tmp/tmp.CCkXU9QaFt /tmp/tmp.h4UkMcu2U7 ++ return 0 + client_pod=pxc-client-59944c5bbf-wqbt7 + wait_pod pxc-client-59944c5bbf-wqbt7 + local pod=pxc-client-59944c5bbf-wqbt7 + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-wqbt7 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-wqbt7 condition met waiting for pod/pxc-client-59944c5bbf-wqbt7 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.30P5AqZU1r/select-3.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-3.sql /tmp/tmp.30P5AqZU1r/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Dp5TVCO9ru +++ mktemp ++ local LAST_ERR=/tmp/tmp.5l62MGQbo3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Dp5TVCO9ru ++ cat /tmp/tmp.5l62MGQbo3 ++ rm /tmp/tmp.Dp5TVCO9ru /tmp/tmp.5l62MGQbo3 ++ return 0 + current_generation=1 + [[ 1 != \1 ]] + destroy users-28990 + local namespace=users-28990 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + grep -v 'get backup status: Job.batch' + sort -u + tee /tmp/tmp.30P5AqZU1r/operator.log ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.stQ5K12ThI +++ mktemp ++ local LAST_ERR=/tmp/tmp.hS20lzhcIW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ 
exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.stQ5K12ThI ++ cat /tmp/tmp.hS20lzhcIW ++ rm /tmp/tmp.stQ5K12ThI /tmp/tmp.hS20lzhcIW ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-67bfc9d88f-wrxz2 ++ mktemp + local LAST_OUT=/tmp/tmp.5DhJO04dSl ++ mktemp + local LAST_ERR=/tmp/tmp.tp6FXPxPe4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-67bfc9d88f-wrxz2 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5DhJO04dSl + cat /tmp/tmp.tp6FXPxPe4 + rm /tmp/tmp.5DhJO04dSl /tmp/tmp.tp6FXPxPe4 + return 0 2025-11-24T18:51:33.163Z INFO setup Manager starting up {"gitCommit": "f48cc30b73ca816d2a3a5f78d98b16af43297c61", "gitBranch": "PR-2124-f48cc30b", "buildTime": "2025-11-24T16:36:57Z", "goVersion": "go1.25.4", "os": "linux", "arch": "amd64"} 2025-11-24T18:51:33.163Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.13-gke.1454000"} 2025-11-24T18:51:33.166Z INFO setup Registering Components. 2025-11-24T18:51:34.060Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-11-24T18:51:34.060Z INFO controller-runtime.metrics Starting metrics server 2025-11-24T18:51:34.060Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2025-11-24T18:51:34.060Z INFO controller-runtime.webhook Starting webhook server 2025-11-24T18:51:34.060Z INFO setup Starting the Cmd. 2025-11-24T18:51:34.060Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-11-24T18:51:34.061Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2025-11-24T18:51:34.061Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-11-24T18:51:34.061Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-11-24T18:51:34.161Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 
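(Editor's note: the destroy step that produced this dump pipes the raw operator log through a filter chain before tee'ing it into the run's tmpdir. Written out as a single pipeline — assembled from the interleaved trace above, where the relative order of the grep filters is not recoverable — it is roughly:

pod=$(kubectl get pods -n pxc-operator \
    --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator \
    -o 'jsonpath={.items[].metadata.name}')
kubectl logs -n pxc-operator "$pod" \
    | grep -v level=info \
    | grep -v 'the object has been modified' \
    | grep -v 'get backup status: Job.batch' \
    | /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \
    | sort -u \
    | tee /tmp/tmp.30P5AqZU1r/operator.log

Dropping info-level and known-benign messages and stripping the numeric "ts" field lets sort -u deduplicate otherwise-identical entries; tee keeps a copy with the test artifacts while the filtered log is printed below.)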
2025-11-24T18:51:34.193Z DEBUG events percona-xtradb-cluster-operator-67bfc9d88f-wrxz2_dadbb0c5-83c4-4cdd-8725-c0b47ede6a62 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"5e9f9709-a337-44a1-a196-6189d7e891b1","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1764010294185119009"}, "reason": "LeaderElection"} 2025-11-24T18:51:34.193Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.Secret"} 2025-11-24T18:51:34.193Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2025-11-24T18:51:34.194Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-11-24T18:51:34.194Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-11-24T18:51:34.194Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-11-24T18:51:34.295Z INFO Starting Controller {"controller": "pxc-controller"} 2025-11-24T18:51:34.295Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2025-11-24T18:51:34.295Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2025-11-24T18:51:34.295Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2025-11-24T18:51:34.396Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2025-11-24T18:51:34.396Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2025-11-24T18:52:08.767Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f22937ea-9306-4094-9306-441c6d5ff2d7", "version": "1.19.0"} 2025-11-24T18:52:09.030Z INFO User secrets updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f22937ea-9306-4094-9306-441c6d5ff2d7", "secrets": "my-cluster-secrets"} 2025-11-24T18:52:09.272Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f22937ea-9306-4094-9306-441c6d5ff2d7", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-11-24T18:52:09.286Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f22937ea-9306-4094-9306-441c6d5ff2d7", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-11-24T18:52:09.846Z ERROR Reconciler error {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f22937ea-9306-4094-9306-441c6d5ff2d7", "error": "failed to deploy proxysql: updatePod for proxysql: reconcile config: reconcile autotune config: create or update configmap: configmaps \"auto-some-name-pxc\" already exists", "errorVerbose": "configmaps \"auto-some-name-pxc\" already exists\ncreate or update 
configmap\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileAutotuneConfigMap\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:92\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:25\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nreconcile autotune 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:27\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nreconcile 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:54\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nupdatePod for proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:578\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693\nfailed to deploy 
2025-11-24T18:52:09.953Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "09aa139b-4f4d-4882-9455-ec57c6b068d6", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"}
2025-11-24T18:52:09.999Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "09aa139b-4f4d-4882-9455-ec57c6b068d6", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"}
2025-11-24T18:52:10.189Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "09aa139b-4f4d-4882-9455-ec57c6b068d6", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"}
2025-11-24T18:52:10.237Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "09aa139b-4f4d-4882-9455-ec57c6b068d6", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"}
2025-11-24T18:52:10.334Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "09aa139b-4f4d-4882-9455-ec57c6b068d6", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"}
2025-11-24T18:52:10.423Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "09aa139b-4f4d-4882-9455-ec57c6b068d6", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"}
2025-11-24T18:52:10.577Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "09aa139b-4f4d-4882-9455-ec57c6b068d6", "object": "some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"}
2025-11-24T18:52:11.317Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "71380113-4aff-4f97-bfd6-643b56d932b6", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"}
2025-11-24T18:53:28.258Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "1db917fd-b7c6-4be9-a737-5d0bf3b5e218", "user": "operator"}
2025-11-24T18:53:28.323Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "1db917fd-b7c6-4be9-a737-5d0bf3b5e218", "user": "monitor"}
2025-11-24T18:53:28.408Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "1db917fd-b7c6-4be9-a737-5d0bf3b5e218"}
2025-11-24T18:53:28.455Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "1db917fd-b7c6-4be9-a737-5d0bf3b5e218"}
2025-11-24T18:53:28.501Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "1db917fd-b7c6-4be9-a737-5d0bf3b5e218", "user": "xtrabackup"}
2025-11-24T18:53:28.589Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "1db917fd-b7c6-4be9-a737-5d0bf3b5e218"}
2025-11-24T18:53:28.667Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "1db917fd-b7c6-4be9-a737-5d0bf3b5e218", "user": "replication"}
2025-11-24T18:53:28.676Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "1db917fd-b7c6-4be9-a737-5d0bf3b5e218", "err": "get primary pxc pod: not found"}
2025-11-24T18:53:33.190Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "4ae579e1-7f9d-49e0-8201-3324783239fa", "err": "get primary pxc pod: not found"}
2025-11-24T18:53:38.333Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "96a29c92-efd0-466a-9fac-fc901332be58", "err": "get primary pxc pod: not found"}
2025-11-24T18:55:50.164Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "51b1648a-716e-49e1-b081-269f3e3d0e1a", "user": "root"}
2025-11-24T18:55:50.262Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "51b1648a-716e-49e1-b081-269f3e3d0e1a", "new version": "8.0.43-34.1"}
2025-11-24T18:55:52.481Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "51b1648a-716e-49e1-b081-269f3e3d0e1a"}
2025-11-24T18:55:56.921Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "d07d426f-03b9-47af-aaf2-c953338b8f11"}
2025-11-24T18:56:02.191Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "0fd0211a-2f59-4324-b453-87cd56cbe2f4"}
2025-11-24T18:56:07.718Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f40fd477-4b44-4121-a1ce-88075fd25a5a"}
2025-11-24T18:56:12.718Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "1aaebe2c-3443-48d1-b5ec-044e0f057494"}
2025-11-24T18:56:18.085Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "35a09c8a-39d1-4ca5-b0e5-c28511a78692"}
2025-11-24T18:56:23.475Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "c41b0b01-cfdb-4072-bea4-092c8e509280"}
2025-11-24T18:56:29.117Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "00c9b9f2-7d4a-4ee5-a853-9c1261f2e93b"}
2025-11-24T18:56:34.406Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "6beccadc-7c7c-48c6-b51a-7bcf28039dfa"}
2025-11-24T18:56:39.495Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "527668f1-94af-4c2d-b7b4-12474c6cb8c3"}
2025-11-24T18:56:45.086Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "74f6b5be-4f99-41d6-9076-deba9923e09b"}
2025-11-24T18:56:50.373Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "ddc493d1-27f8-4363-a7ea-5e7c477698ce"}
2025-11-24T18:56:55.717Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "835149ea-37df-43e8-8412-495131752d78"}
2025-11-24T18:57:00.996Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "df1bc8a2-f964-49ab-9327-f0bb15a26338"}
2025-11-24T18:57:06.433Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "b6e6d0b2-1750-4d00-b001-6e0be868e9b4"}
2025-11-24T18:57:08.726Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "cbc983ff-44b8-4288-91b1-155b9b968be1", "user": "root"}
2025-11-24T18:57:08.767Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "cbc983ff-44b8-4288-91b1-155b9b968be1", "user": "root"}
2025-11-24T18:57:08.788Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "cbc983ff-44b8-4288-91b1-155b9b968be1", "secret": "some-name-mysql-init", "user": "root"}
2025-11-24T18:57:11.213Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "cbc983ff-44b8-4288-91b1-155b9b968be1"}
2025-11-24T18:57:11.238Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "cbc983ff-44b8-4288-91b1-155b9b968be1", "user": "root"}
2025-11-24T18:57:11.278Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "cbc983ff-44b8-4288-91b1-155b9b968be1", "user": "root"}
2025-11-24T18:57:13.098Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "cbc983ff-44b8-4288-91b1-155b9b968be1"}
2025-11-24T18:57:18.371Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "4326f325-0950-491b-8d63-00555cdca1d8"}
2025-11-24T18:57:23.699Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "02bda052-5ae5-450c-9fa7-b570c3347dfe"}
2025-11-24T18:57:27.521Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "8eac50d2-0527-45df-8460-1ee75ea6a01c", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-24T18:57:27.572Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "8eac50d2-0527-45df-8460-1ee75ea6a01c", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-24T18:57:29.018Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "c51c84b8-0e00-4a58-9230-1d2b6a758186"}
2025-11-24T18:57:48.254Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "be522956-b5ba-40de-8c9f-8b73a81e4eca", "err": "get primary pxc pod: not found"}
2025-11-24T18:57:51.991Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "71382dc7-38d0-40b9-8ffc-4455c745dbbb", "user": "proxyadmin"}
2025-11-24T18:57:51.991Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "71382dc7-38d0-40b9-8ffc-4455c745dbbb", "user": "proxyadmin"}
2025-11-24T18:57:52.041Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "71382dc7-38d0-40b9-8ffc-4455c745dbbb", "user": "proxyadmin"}
2025-11-24T18:57:52.068Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "71382dc7-38d0-40b9-8ffc-4455c745dbbb", "user": "proxyadmin"}
2025-11-24T18:57:52.068Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "71382dc7-38d0-40b9-8ffc-4455c745dbbb", "last-applied-secret": "b8a029e65b796f96a332cdb88d930987a09e172d5574b49527cff10af58e869b"}
2025-11-24T18:57:52.076Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "71382dc7-38d0-40b9-8ffc-4455c745dbbb", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-24T18:57:52.750Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "50f05aa6-c631-4cfb-847d-8f87a9ad07c3", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:974\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-24T18:58:24.399Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "dbb2fe6f-baa2-4217-b783-77e61ba917ef"}
2025-11-24T18:58:32.149Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "fec59869-e57f-4b61-b85b-18414ae96c27", "user": "xtrabackup"}
2025-11-24T18:58:32.171Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "fec59869-e57f-4b61-b85b-18414ae96c27", "user": "xtrabackup"}
2025-11-24T18:58:32.191Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "fec59869-e57f-4b61-b85b-18414ae96c27", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2025-11-24T18:58:32.226Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "fec59869-e57f-4b61-b85b-18414ae96c27", "user": "xtrabackup"}
2025-11-24T18:58:32.249Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "fec59869-e57f-4b61-b85b-18414ae96c27", "user": "xtrabackup"}
2025-11-24T18:58:32.256Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "fec59869-e57f-4b61-b85b-18414ae96c27", "last-applied-secret": "469f642c04e6a7d068fa51ae785d66fe6ece1ed25a22927a26302c22a7b34dac"}
2025-11-24T18:58:32.258Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "fec59869-e57f-4b61-b85b-18414ae96c27", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-24T18:58:32.316Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "fec59869-e57f-4b61-b85b-18414ae96c27", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-24T18:58:35.004Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "fec59869-e57f-4b61-b85b-18414ae96c27"}
2025-11-24T18:59:24.968Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "71126469-39b8-4d1a-92d1-5be9ed31683f", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-28990 on 34.118.224.10:53: no such host"}
2025-11-24T18:59:30.262Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "e3628b96-e047-4da4-9234-b37dd146af93", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-28990 on 34.118.224.10:53: no such host"}
2025-11-24T19:00:12.700Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "fd3cd493-26e9-4182-84d3-a797d4d6166c", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-28990 on 34.118.224.10:53: no such host"}
2025-11-24T19:00:17.982Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "41ef33f2-653a-4b7f-b736-94d824233016", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.233.18.45:33062: connect: connection refused"}
2025-11-24T19:00:23.223Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "16b4f68d-72ff-4ef6-826d-f3caeddf48e8", "primary name": "some-name-pxc-0.some-name-pxc.users-28990.svc.cluster.local"}
2025-11-24T19:00:28.395Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "cd9fc0e3-b65d-408a-ab3a-b4d4c2d43a2d", "primary name": "some-name-pxc-0.some-name-pxc.users-28990.svc.cluster.local"}
2025-11-24T19:00:33.539Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "154a2c4e-a27a-4cdc-8185-82f8698f8734", "primary name": "some-name-pxc-0.some-name-pxc.users-28990.svc.cluster.local"}
2025-11-24T19:00:38.705Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "e2a839e9-bacc-405c-8818-54c9daebbbe1", "primary name": "some-name-pxc-0.some-name-pxc.users-28990.svc.cluster.local"}
2025-11-24T19:00:43.855Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "e9e77390-0b24-42e4-bfe9-75a96c77df6a", "primary name": "some-name-pxc-0.some-name-pxc.users-28990.svc.cluster.local"}
2025-11-24T19:00:49.001Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "544f03ae-eee1-47df-90d8-63dc7a7f61c1", "primary name": "some-name-pxc-0.some-name-pxc.users-28990.svc.cluster.local"}
2025-11-24T19:00:54.161Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "08d4281f-9244-4b4d-ae95-36c4d1e32800", "primary name": "some-name-pxc-0.some-name-pxc.users-28990.svc.cluster.local"}
2025-11-24T19:00:59.320Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "8f2e9e9b-bf34-43d6-95e3-ade5448d40e4", "primary name": "some-name-pxc-0.some-name-pxc.users-28990.svc.cluster.local"}
2025-11-24T19:01:07.391Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "0ff74911-42eb-4225-8f49-cde6976a0326"}
2025-11-24T19:01:12.086Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "c32cf51f-410f-4e01-9f06-dac0b38b09b6"}
2025-11-24T19:01:12.908Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "947088b3-7512-4311-802f-9542bee3b19b", "user": "monitor"}
2025-11-24T19:01:12.929Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "947088b3-7512-4311-802f-9542bee3b19b", "user": "monitor"}
2025-11-24T19:01:12.952Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "947088b3-7512-4311-802f-9542bee3b19b", "secret": "some-name-mysql-init", "user": "monitor"}
2025-11-24T19:01:12.986Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "947088b3-7512-4311-802f-9542bee3b19b", "user": "monitor"}
2025-11-24T19:01:13.014Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "947088b3-7512-4311-802f-9542bee3b19b", "user": "monitor"}
2025-11-24T19:01:13.310Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "947088b3-7512-4311-802f-9542bee3b19b", "last-applied-secret": "cc807018cc179922b153f7672bd63a69b1872cb121a592ef253747d3286fdea1"}
2025-11-24T19:01:13.313Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "947088b3-7512-4311-802f-9542bee3b19b", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
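The monitor rotation just above shows the order the operator works in: change the password in MySQL, refresh the some-name-mysql-init bootstrap secret, update the proxy user and internal secrets, and only at the end discard the old password. A rough outline of that sequence with hypothetical helper names (the real logic lives in pkg/controller/pxc/users.go; this is a sketch, not the operator's API):

package main

import (
	"context"
	"fmt"
)

// Illustrative no-op stand-ins; each mirrors one log line above.
func alterUserPassword(ctx context.Context, user string) error { // "Password changed, updating user"
	fmt.Println("Password updated:", user)
	return nil
}

func updateInitSecret(ctx context.Context, user string) error { // "MySQL init secret created/updated"
	fmt.Println("MySQL init secret updated:", user)
	return nil
}

func updateInternalSecret(ctx context.Context, user string) error { // "Internal secrets updated"
	fmt.Println("Internal secrets updated:", user)
	return nil
}

func discardOldPassword(ctx context.Context, user string) error { // "Old password discarded"
	fmt.Println("Old password discarded:", user)
	return nil
}

func rotate(ctx context.Context, user string) error {
	if err := alterUserPassword(ctx, user); err != nil {
		return err
	}
	if err := updateInitSecret(ctx, user); err != nil {
		return err
	}
	if err := updateInternalSecret(ctx, user); err != nil {
		return err
	}
	// Discarded last, so the old credential stays valid while pods and
	// proxies are still switching over to the new one.
	return discardOldPassword(ctx, user)
}

func main() {
	if err := rotate(context.Background(), "monitor"); err != nil {
		fmt.Println("rotation failed:", err)
	}
}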
2025-11-24T19:01:16.043Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "947088b3-7512-4311-802f-9542bee3b19b", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:974\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-24T19:01:50.073Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "eecd461b-d715-46c1-9418-9427fdcdaee1", "user": "monitor"}
2025-11-24T19:01:52.313Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "eecd461b-d715-46c1-9418-9427fdcdaee1"}
2025-11-24T19:01:55.070Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "7f493b4c-fa7c-417b-b659-e22a1322f247", "user": "monitor"}
2025-11-24T19:01:57.200Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "7f493b4c-fa7c-417b-b659-e22a1322f247"}
2025-11-24T19:02:00.637Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "09741a8d-05b4-4352-8316-3c4ae3d73a31", "user": "monitor"}
2025-11-24T19:02:02.782Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "09741a8d-05b4-4352-8316-3c4ae3d73a31"}
2025-11-24T19:02:13.746Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "1ce7a09e-3420-41f6-b478-2e70bdff1349", "user": "monitor"}
2025-11-24T19:02:15.914Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "1ce7a09e-3420-41f6-b478-2e70bdff1349"}
2025-11-24T19:02:19.423Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "bb91d683-a636-4f0b-bfe5-ba9417f95bbd", "user": "monitor"}
2025-11-24T19:02:21.543Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "bb91d683-a636-4f0b-bfe5-ba9417f95bbd"}
2025-11-24T19:02:24.999Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "3a00efeb-d986-4b0c-9f8b-ecd259e15d10", "user": "monitor"}
2025-11-24T19:02:25.697Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "3a00efeb-d986-4b0c-9f8b-ecd259e15d10", "user": "monitor"}
2025-11-24T19:02:25.712Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "3a00efeb-d986-4b0c-9f8b-ecd259e15d10", "last-applied-secret": "cc807018cc179922b153f7672bd63a69b1872cb121a592ef253747d3286fdea1"}
2025-11-24T19:02:27.711Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "3a00efeb-d986-4b0c-9f8b-ecd259e15d10"}
2025-11-24T19:02:32.880Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "462c5cb2-97dd-48aa-b8fb-343fce665f6f"}
2025-11-24T19:02:38.421Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "cfc1d4f8-93ab-438f-9715-7841f0e6ecd6"}
2025-11-24T19:02:43.688Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "3b4bc910-27e4-4fac-a512-13df9801d168"}
2025-11-24T19:02:49.006Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "3e38b5cb-ce8b-4330-ad0a-5d18aecaa533"}
2025-11-24T19:02:49.702Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "02e19808-afbf-4704-b5c2-efa2d25820e9", "user": "operator"}
2025-11-24T19:02:49.725Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "02e19808-afbf-4704-b5c2-efa2d25820e9", "user": "operator"}
2025-11-24T19:02:49.747Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "02e19808-afbf-4704-b5c2-efa2d25820e9", "secret": "some-name-mysql-init", "user": "operator"}
2025-11-24T19:02:49.766Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "02e19808-afbf-4704-b5c2-efa2d25820e9", "user": "operator"}
2025-11-24T19:02:49.788Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "02e19808-afbf-4704-b5c2-efa2d25820e9", "user": "operator"}
2025-11-24T19:02:49.804Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "02e19808-afbf-4704-b5c2-efa2d25820e9", "last-applied-secret": "74cff4001437fe6a341c2e390fa8196af53b778eab8300e8911eed6bd4cbea2f"}
2025-11-24T19:02:49.808Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "02e19808-afbf-4704-b5c2-efa2d25820e9", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
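The repeated "Password updated but old one not discarded" lines for monitor above suggest MySQL 8.0's dual-password feature: the new password is installed while the old one stays valid, and the old one is dropped only once every consumer has reconnected ("Old password discarded"). Assuming that mechanism (the log wording implies it, but this is not verified against the operator's source), the SQL involved looks like this sketch:

package main

import (
	"database/sql"
	"log"

	_ "github.com/go-sql-driver/mysql"
)

func main() {
	// Hypothetical DSN for illustration only.
	db, err := sql.Open("mysql", "root:rootpass@tcp(127.0.0.1:3306)/")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Phase 1: both the old and the new password authenticate, so pods
	// can be restarted gradually ("Password updated but old one not discarded").
	if _, err := db.Exec("ALTER USER 'monitor'@'%' IDENTIFIED BY 'new-pass' RETAIN CURRENT PASSWORD"); err != nil {
		log.Fatal(err)
	}
	// Phase 2: once every client uses the new password, drop the old one
	// ("Old password discarded").
	if _, err := db.Exec("ALTER USER 'monitor'@'%' DISCARD OLD PASSWORD"); err != nil {
		log.Fatal(err)
	}
}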
2025-11-24T19:02:53.538Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "02e19808-afbf-4704-b5c2-efa2d25820e9", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:974\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-24T19:03:25.986Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "50278065-cecb-4774-b538-1e0aac413c62"}
2025-11-24T19:03:30.380Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "e2395ba3-b7fe-45e1-b324-6f9c45d8a1af"}
2025-11-24T19:03:35.757Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "082e83b8-bfbe-4256-8d44-9f87485c6ab4"}
2025-11-24T19:03:38.835Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "secrets": "my-cluster-secrets-2"}
2025-11-24T19:03:38.844Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "user": "root"}
2025-11-24T19:03:38.882Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "user": "root"}
2025-11-24T19:03:38.903Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "secret": "some-name-mysql-init", "user": "root"}
2025-11-24T19:03:41.521Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75"}
2025-11-24T19:03:41.573Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "user": "root"}
2025-11-24T19:03:41.611Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "user": "root"}
2025-11-24T19:03:41.619Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "user": "operator"}
2025-11-24T19:03:41.641Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "user": "operator"}
2025-11-24T19:03:41.661Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "secret": "some-name-mysql-init", "user": "operator"}
2025-11-24T19:03:41.686Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "user": "operator"}
2025-11-24T19:03:41.710Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "user": "operator"}
2025-11-24T19:03:41.716Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "user": "monitor"}
2025-11-24T19:03:41.738Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "user": "monitor"}
2025-11-24T19:03:41.760Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "secret": "some-name-mysql-init", "user": "monitor"}
2025-11-24T19:03:41.792Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "user": "monitor"}
2025-11-24T19:03:41.815Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "user": "monitor"}
2025-11-24T19:03:42.118Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "user": "xtrabackup"}
2025-11-24T19:03:42.142Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "user": "xtrabackup"}
2025-11-24T19:03:42.164Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2025-11-24T19:03:42.187Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "user": "xtrabackup"}
2025-11-24T19:03:42.210Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "user": "xtrabackup"}
2025-11-24T19:03:42.218Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "user": "replication"}
2025-11-24T19:03:42.239Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "user": "replication"}
2025-11-24T19:03:42.260Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "secret": "some-name-mysql-init", "user": "replication"}
2025-11-24T19:03:42.288Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "user": "replication"}
2025-11-24T19:03:42.311Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "user": "replication"}
2025-11-24T19:03:42.311Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "user": "proxyadmin"}
2025-11-24T19:03:42.343Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "user": "proxyadmin"}
2025-11-24T19:03:42.367Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "user": "proxyadmin"}
2025-11-24T19:03:42.367Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "last-applied-secret": "a3ee7ed1e38d03b7e68cb012ed070fedd211a0292ac9208165e33abd45a4f22f"}
2025-11-24T19:03:42.367Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "last-applied-secret": "a3ee7ed1e38d03b7e68cb012ed070fedd211a0292ac9208165e33abd45a4f22f"}
2025-11-24T19:03:42.370Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-24T19:03:42.425Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
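The last-applied-secret values in the restart entries above look like SHA-256 digests of the credential material: hashing secret data into a StatefulSet annotation is a common way to trigger a rolling restart exactly when credentials change, and the "hashChanged" field in the Updating object entries fits that reading. A minimal sketch of the pattern (assumed mechanism, not code verified against the operator):

package main

import (
	"crypto/sha256"
	"fmt"
	"sort"
)

// hashSecretData derives a deterministic digest from secret contents so an
// annotation like "last-applied-secret" changes only when the data does.
func hashSecretData(data map[string][]byte) string {
	// Sort the keys so the digest is stable across map iteration order.
	keys := make([]string, 0, len(data))
	for k := range data {
		keys = append(keys, k)
	}
	sort.Strings(keys)

	h := sha256.New()
	for _, k := range keys {
		h.Write([]byte(k))
		h.Write(data[k])
	}
	return fmt.Sprintf("%x", h.Sum(nil))
}

func main() {
	secret := map[string][]byte{
		"root":    []byte("new-root-pass"),
		"monitor": []byte("new-monitor-pass"),
	}
	fmt.Println("last-applied-secret:", hashSecretData(secret))
}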
2025-11-24T19:03:44.468Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "23ed3608-40da-4a98-a024-36cb562bce75", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:974\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-24T19:04:40.783Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "15fe00ed-73e1-47d1-992f-221c14f33fa9", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-28990 on 34.118.224.10:53: no such host"}
2025-11-24T19:04:46.105Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "a0559ec2-8f5e-45a1-8877-0784ac25345b", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-28990 on 34.118.224.10:53: no such host"}
2025-11-24T19:05:39.291Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "bbffd6c1-9bf0-41a7-ab33-cf9a578bf8a5", "err": "failed to connect to pod some-name-pxc-0: invalid connection"}
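The "no such host" errors above are ordinary DNS misses rather than operator bugs: each PXC pod is addressed as <pod>.<service>.<namespace> through the headless service, and while a pod is being recreated during the rolling restart its DNS record disappears, so lookups return NXDOMAIN. A small Go sketch showing how that exact error string arises:

package main

import (
	"fmt"
	"net"
)

func main() {
	// Pod FQDN from the log; only resolvable from inside the cluster.
	host := "some-name-pxc-1.some-name-pxc.users-28990.svc.cluster.local"
	addrs, err := net.LookupHost(host)
	if err != nil {
		// Matches the operator's error: "dial tcp: lookup ...: no such host"
		fmt.Println("lookup failed:", err)
		return
	}
	fmt.Println("resolved:", addrs)
}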
2025-11-24T19:05:44.460Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "ee2e3082-3af8-4d34-bd78-6009555d0fc9", "primary name": "some-name-pxc-0.some-name-pxc.users-28990.svc.cluster.local"}
2025-11-24T19:05:49.638Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "e4c8602c-0b49-44fe-9a11-316d05dae6df", "primary name": "some-name-pxc-0.some-name-pxc.users-28990.svc.cluster.local"}
2025-11-24T19:05:54.801Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "80604147-c3e0-4d77-9f07-633566f6139d", "primary name": "some-name-pxc-0.some-name-pxc.users-28990.svc.cluster.local"}
2025-11-24T19:05:59.959Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "9fedc220-700f-4905-a4d6-bf7040a4b0a6", "primary name": "some-name-pxc-0.some-name-pxc.users-28990.svc.cluster.local"}
2025-11-24T19:06:05.098Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "5367ca57-92a3-454a-b2a3-7dc8a87342ad", "primary name": "some-name-pxc-0.some-name-pxc.users-28990.svc.cluster.local"}
2025-11-24T19:06:10.228Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "cc981b4b-f86a-46ab-bb13-b723c164b45e", "primary name": "some-name-pxc-0.some-name-pxc.users-28990.svc.cluster.local"}
2025-11-24T19:06:15.370Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "e8672e89-5ee3-4786-b0e6-307488ca051c", "primary name": "some-name-pxc-0.some-name-pxc.users-28990.svc.cluster.local"}
2025-11-24T19:06:21.740Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "8f53991b-e325-4e76-967e-025d69bf9201", "user": "monitor"}
2025-11-24T19:06:22.718Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "8f53991b-e325-4e76-967e-025d69bf9201", "user": "monitor"}
2025-11-24T19:06:22.732Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "8f53991b-e325-4e76-967e-025d69bf9201", "last-applied-secret": "a3ee7ed1e38d03b7e68cb012ed070fedd211a0292ac9208165e33abd45a4f22f"}
2025-11-24T19:06:24.529Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "8f53991b-e325-4e76-967e-025d69bf9201"}
2025-11-24T19:06:24.721Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "0214ea48-a257-45bd-ab7b-4942a8a2a2ee", "user": "operator"}
2025-11-24T19:06:24.744Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "0214ea48-a257-45bd-ab7b-4942a8a2a2ee", "user": "operator"}
2025-11-24T19:06:24.763Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "0214ea48-a257-45bd-ab7b-4942a8a2a2ee", "secret": "some-name-mysql-init", "user": "operator"}
2025-11-24T19:06:24.786Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "0214ea48-a257-45bd-ab7b-4942a8a2a2ee", "user": "operator"}
2025-11-24T19:06:24.810Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "0214ea48-a257-45bd-ab7b-4942a8a2a2ee", "user": "operator"}
2025-11-24T19:06:24.828Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "0214ea48-a257-45bd-ab7b-4942a8a2a2ee", "last-applied-secret": "e5cb09926aafed17e5bc375940a405aef0ce8a22047e64d5e327e21e67ce9434"}
2025-11-24T19:06:24.832Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "0214ea48-a257-45bd-ab7b-4942a8a2a2ee", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
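The failure that follows repeats a pattern seen throughout this run: syncusers executes while a rotation is mid-flight, so one side still holds the old credential and MySQL answers with ERROR 1045. A quick probe to see which credential an endpoint currently accepts (hypothetical service address and env var, not something the harness exports; the analogous check against ProxySQL's admin interface would target port 6032 as proxyadmin):

package main

import (
	"database/sql"
	"fmt"
	"log"
	"os"

	_ "github.com/go-sql-driver/mysql"
)

func main() {
	// During the restart window a pod may still run with the previous
	// password, which surfaces exactly as the ERROR 1045 lines below.
	dsn := fmt.Sprintf("operator:%s@tcp(some-name-pxc.users-28990:3306)/",
		os.Getenv("OPERATOR_PASSWORD"))
	db, err := sql.Open("mysql", dsn)
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()
	if err := db.Ping(); err != nil {
		log.Fatal("credential check failed: ", err) // e.g. Error 1045: Access denied
	}
	fmt.Println("operator user authenticated")
}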
2025-11-24T19:06:28.268Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "0214ea48-a257-45bd-ab7b-4942a8a2a2ee", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28990.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:974\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2025-11-24T19:07:01.540Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "ca01a195-2bd6-486e-8306-519bd210ed6c"}
2025-11-24T19:07:05.921Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "8602d8b0-c618-498b-bd50-90afe185e7b1"}
2025-11-24T19:07:11.251Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "737f17a3-b6da-47d0-8577-02391ececcda"}
2025-11-24T19:07:16.639Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "46eedc4b-510c-4ff1-a9a0-5c14f089dc7e"}
2025-11-24T19:07:22.362Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "d4ef22e6-cd36-497e-bbfd-cfbb9c91e9b0"}
2025-11-24T19:07:27.164Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "95f73ceb-bb8a-4dab-90d5-c6f89c57b97b"}
2025-11-24T19:07:32.752Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "15b9b47c-604d-4309-9dc1-56d9acdb63f9"}
2025-11-24T19:07:38.030Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "4e5299fb-58c8-4e50-9663-f76a30f8c72d"}
2025-11-24T19:07:43.161Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "9aaab32b-1cd7-4fff-a0a3-1081c6d7e6e4"}
2025-11-24T19:07:48.809Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "af83b043-01d6-46af-b7c6-91282ce5ab9d"}
2025-11-24T19:07:54.148Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "ddbe21d7-4602-404c-a6a4-1dc62786545a"}
2025-11-24T19:07:59.615Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "68764e92-af72-441b-82eb-b6d8116998f8"}
2025-11-24T19:08:04.838Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "e18d0046-48f8-4584-b37f-9281980cf510"}
2025-11-24T19:08:10.217Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "c84a13b9-d103-4c75-a31a-625328f21d15"}
2025-11-24T19:08:15.537Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "e8fc1fc4-f058-4716-b3e6-c9bca55f396c"}
2025-11-24T19:08:16.416Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f30f9bc3-5404-42dd-98e1-981c396f6fee", "user": "root"}
2025-11-24T19:08:16.455Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f30f9bc3-5404-42dd-98e1-981c396f6fee", "user": "root"}
2025-11-24T19:08:16.484Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f30f9bc3-5404-42dd-98e1-981c396f6fee", "secret": "some-name-mysql-init", "user": "root"}
2025-11-24T19:08:18.968Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f30f9bc3-5404-42dd-98e1-981c396f6fee"}
2025-11-24T19:08:18.995Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f30f9bc3-5404-42dd-98e1-981c396f6fee", "user": "root"}
2025-11-24T19:08:19.033Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f30f9bc3-5404-42dd-98e1-981c396f6fee", "user": "root"}
2025-11-24T19:08:19.047Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f30f9bc3-5404-42dd-98e1-981c396f6fee", "user": "monitor"}
2025-11-24T19:08:19.070Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f30f9bc3-5404-42dd-98e1-981c396f6fee", "user": "monitor"}
2025-11-24T19:08:19.095Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f30f9bc3-5404-42dd-98e1-981c396f6fee", "secret": "some-name-mysql-init", "user": "monitor"}
2025-11-24T19:08:19.125Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f30f9bc3-5404-42dd-98e1-981c396f6fee", "user": "monitor"}
2025-11-24T19:08:19.147Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f30f9bc3-5404-42dd-98e1-981c396f6fee", "user": "monitor"}
2025-11-24T19:08:19.450Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f30f9bc3-5404-42dd-98e1-981c396f6fee", "user": "xtrabackup"}
2025-11-24T19:08:19.473Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f30f9bc3-5404-42dd-98e1-981c396f6fee", "user": "xtrabackup"}
2025-11-24T19:08:19.492Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f30f9bc3-5404-42dd-98e1-981c396f6fee", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2025-11-24T19:08:19.513Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f30f9bc3-5404-42dd-98e1-981c396f6fee", "user": "xtrabackup"}
2025-11-24T19:08:19.534Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f30f9bc3-5404-42dd-98e1-981c396f6fee", "user": "xtrabackup"}
2025-11-24T19:08:19.541Z INFO Password changed, updating
user {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f30f9bc3-5404-42dd-98e1-981c396f6fee", "user": "proxyadmin"} 2025-11-24T19:08:19.572Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f30f9bc3-5404-42dd-98e1-981c396f6fee", "user": "proxyadmin"} 2025-11-24T19:08:19.596Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f30f9bc3-5404-42dd-98e1-981c396f6fee", "user": "proxyadmin"} 2025-11-24T19:08:19.596Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f30f9bc3-5404-42dd-98e1-981c396f6fee", "last-applied-secret": "dac6b66c574159a7a809a006a5218a2029399c30c45888e438c726ba59836686"} 2025-11-24T19:08:19.596Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f30f9bc3-5404-42dd-98e1-981c396f6fee", "last-applied-secret": "dac6b66c574159a7a809a006a5218a2029399c30c45888e438c726ba59836686"} 2025-11-24T19:08:19.601Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f30f9bc3-5404-42dd-98e1-981c396f6fee", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-24T19:08:19.660Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f30f9bc3-5404-42dd-98e1-981c396f6fee", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-24T19:08:21.979Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f30f9bc3-5404-42dd-98e1-981c396f6fee", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:974\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-24T19:08:37.080Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "843fd3d2-427b-47f6-b415-1d7db11031b6", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-24T19:08:37.155Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "843fd3d2-427b-47f6-b415-1d7db11031b6", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-24T19:08:37.227Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "843fd3d2-427b-47f6-b415-1d7db11031b6", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-24T19:08:37.385Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "843fd3d2-427b-47f6-b415-1d7db11031b6", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-24T19:08:37.506Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "843fd3d2-427b-47f6-b415-1d7db11031b6", "object": "some-name-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 
2025-11-24T19:08:38.602Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "7940fca8-0c88-46aa-9830-ba1ccb77bdf4", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"}
2025-11-24T19:10:05.519Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "76ce831c-eee3-42f2-bfc2-e33ecafd0fe0", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-28990 on 34.118.224.10:53: no such host"}
2025-11-24T19:10:10.508Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "e289b050-592b-4515-b745-49da7c73085d", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-28990 on 34.118.224.10:53: no such host"}
2025-11-24T19:10:16.248Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "9b96d5b2-3aba-497d-a8f8-f4049c35cf9b", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-28990 on 34.118.224.10:53: no such host"}
2025-11-24T19:11:46.075Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "323e7d95-3f6d-4400-b0ef-2f576b475ec4", "user": "root"}
2025-11-24T19:11:46.111Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "323e7d95-3f6d-4400-b0ef-2f576b475ec4", "user": "root"}
2025-11-24T19:11:46.132Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "323e7d95-3f6d-4400-b0ef-2f576b475ec4", "secret": "some-name-mysql-init", "user": "root"}
2025-11-24T19:11:46.155Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "323e7d95-3f6d-4400-b0ef-2f576b475ec4", "user": "root"}
2025-11-24T19:11:46.192Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "323e7d95-3f6d-4400-b0ef-2f576b475ec4", "user": "root"}
2025-11-24T19:11:46.201Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "323e7d95-3f6d-4400-b0ef-2f576b475ec4", "user": "operator"}
2025-11-24T19:11:46.224Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "323e7d95-3f6d-4400-b0ef-2f576b475ec4", "user": "operator"}
2025-11-24T19:11:46.244Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "323e7d95-3f6d-4400-b0ef-2f576b475ec4", "secret": "some-name-mysql-init", "user": "operator"}
2025-11-24T19:11:46.268Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "323e7d95-3f6d-4400-b0ef-2f576b475ec4", "user": "operator"}
2025-11-24T19:11:46.289Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "323e7d95-3f6d-4400-b0ef-2f576b475ec4", "user": "operator"}
2025-11-24T19:11:46.295Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "323e7d95-3f6d-4400-b0ef-2f576b475ec4", "user": "monitor"}
2025-11-24T19:11:46.316Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "323e7d95-3f6d-4400-b0ef-2f576b475ec4", "user": "monitor"}
2025-11-24T19:11:46.337Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "323e7d95-3f6d-4400-b0ef-2f576b475ec4", "secret": "some-name-mysql-init", "user": "monitor"}
2025-11-24T19:11:46.355Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "323e7d95-3f6d-4400-b0ef-2f576b475ec4", "user": "monitor"}
2025-11-24T19:11:46.672Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "323e7d95-3f6d-4400-b0ef-2f576b475ec4", "user": "xtrabackup"}
2025-11-24T19:11:46.694Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "323e7d95-3f6d-4400-b0ef-2f576b475ec4", "user": "xtrabackup"}
2025-11-24T19:11:46.715Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "323e7d95-3f6d-4400-b0ef-2f576b475ec4", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2025-11-24T19:11:46.739Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "323e7d95-3f6d-4400-b0ef-2f576b475ec4", "user": "xtrabackup"}
2025-11-24T19:11:46.759Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "323e7d95-3f6d-4400-b0ef-2f576b475ec4", "user": "xtrabackup"}
2025-11-24T19:11:46.764Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "323e7d95-3f6d-4400-b0ef-2f576b475ec4", "user": "replication"}
2025-11-24T19:11:46.784Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "323e7d95-3f6d-4400-b0ef-2f576b475ec4", "user": "replication"}
2025-11-24T19:11:46.802Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "323e7d95-3f6d-4400-b0ef-2f576b475ec4", "secret": "some-name-mysql-init", "user": "replication"}
2025-11-24T19:11:46.823Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "323e7d95-3f6d-4400-b0ef-2f576b475ec4", "user": "replication"}
2025-11-24T19:11:46.844Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "323e7d95-3f6d-4400-b0ef-2f576b475ec4", "user": "replication"}
2025-11-24T19:11:46.844Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "323e7d95-3f6d-4400-b0ef-2f576b475ec4", "last-applied-secret": "74cff4001437fe6a341c2e390fa8196af53b778eab8300e8911eed6bd4cbea2f"}
2025-11-24T19:11:46.846Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "323e7d95-3f6d-4400-b0ef-2f576b475ec4", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-24T19:14:20.844Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "acb0d048-135b-4e9a-83a3-0aff262b4e69", "user": "monitor"}
2025-11-24T19:14:22.251Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "acb0d048-135b-4e9a-83a3-0aff262b4e69", "user": "monitor"}
2025-11-24T19:14:24.732Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "2120c120-069f-4df8-85ca-1dff3f34d383", "user": "monitor"}
2025-11-24T19:14:24.754Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "2120c120-069f-4df8-85ca-1dff3f34d383", "user": "monitor"}
2025-11-24T19:14:24.779Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "2120c120-069f-4df8-85ca-1dff3f34d383", "secret": "some-name-mysql-init", "user": "monitor"}
2025-11-24T19:14:24.805Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "2120c120-069f-4df8-85ca-1dff3f34d383", "user": "monitor"}
2025-11-24T19:14:25.842Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "43f48356-e914-4244-a225-365c6aafc64d", "user": "monitor"}
2025-11-24T19:14:31.495Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "27a29cd8-8a64-40e7-a634-8cf4c44150f8", "user": "monitor"}
2025-11-24T19:14:37.479Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "7cc93476-66b9-4a90-b54a-4f4c6037d377", "user": "monitor"}
2025-11-24T19:14:43.248Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "f092aab2-aba7-4a94-bc1d-67d37e69d375", "user": "monitor"}
2025-11-24T19:14:49.171Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-28990", "name": "some-name", "reconcileID": "c7260019-e1e4-41fc-a350-11e137e569af", "user": "monitor"}
github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1
/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:296
/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:421
/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.4/pkg/internal/controller/controller.go:474
/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:869
[mysql] 2025/11/24 19:05:39 packets.go:58 read tcp 10.233.17.59:55610->10.233.18.48:33062: read: connection reset by peer
[mysql] 2025/11/24 19:11:23 packets.go:58 read tcp 10.233.17.59:48218->34.118.236.154:3306: i/o timeout
[mysql] 2025/11/24 19:13:53 packets.go:58 unexpected EOF
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1 -  }, -  { -  }, -  { -  }, -  }, +  }, -  "469f642c04e6a7d068fa51ae785d66fe6ece1ed25a22927a26302c22a7b34dac", -  "74cff4001437fe6a341c2e390fa8196af53b778eab8300e8911eed6bd4cbea", +  "74cff4001437fe6a341c2e390fa8196af53b778eab8300e8911eed6bd4cbea2f", +  "a3ee7ed1e38d03b7e68cb012ed070fedd211a0292ac9208165e33abd45a4f2", -  "a3ee7ed1e38d03b7e68cb012ed070fedd211a0292ac9208165e33abd45a4f22f", +  "a3ee7ed1e38d03b7e68cb012ed070fedd211a0292ac9208165e33abd45a4f22f", -  Annotations: map[string]string{ +  Annotations: map[string]string{ +  APIVersion: "", -  APIVersion: "apps/v1", -  APIVersion: "apps/v1", -  APIVersion: "v1", -  Args: []string{"logrotate"}, +  AvailableReplicas: 0, -  AvailableReplicas: 2, -  AvailableReplicas: 3, -  "b8a029e65b796f96a332cdb88d930987a09e172d5574b49527cff10af58e869b", -  "cc807018cc179922b153f7672bd63a69b1872cb121a592ef253747d3286fdea1", +  "cc807018cc179922b153f7672bd63a69b1872cb121a592ef253747d3286fdea1", -  CollisionCount: &0, +  CollisionCount: nil, +  CreationTimestamp: v1.Time{}, -  CreationTimestamp: v1.Time{Time: s"2025-11-24 18:52:09 +0000 UTC"}, -  CreationTimestamp: v1.Time{Time: s"2025-11-24 18:52:10 +0000 UTC"}, +  CurrentReplicas: 0, -  CurrentReplicas: 2, -  CurrentReplicas: 3, +  CurrentRevision: "", -  CurrentRevision: "some-name-proxysql-5987694b78", -  CurrentRevision: "some-name-proxysql-59fd65b7dd", -  CurrentRevision: "some-name-proxysql-5dc68449c7", -  CurrentRevision: "some-name-proxysql-66c799db87", -  CurrentRevision: "some-name-proxysql-7d78ff9475", -  CurrentRevision: "some-name-proxysql-f8cc4dcfb", -  CurrentRevision: "some-name-pxc-59c6bdb746", -  CurrentRevision: "some-name-pxc-645869ccf4", -  CurrentRevision: "some-name-pxc-7d4bb5f7d4", -  CurrentRevision: "some-name-pxc-8856f89b9", -  "dac6b66c574159a7a809a006a5218a2029399c30c45888e438c726ba59836686", +  "dac6b66c574159a7a809a006a5218a2029399c30c45888e438c726ba59836686", -  DefaultMode: &420, -  DefaultMode: &420, +  DefaultMode: nil, +  DefaultMode: nil, +  DeprecatedServiceAccount: "", -  DeprecatedServiceAccount: "default", +  DNSPolicy: "", -  DNSPolicy: "ClusterFirst", -  "e5cb09926aafed17e5bc375940a405aef0ce8a22047e64d5e327e21e67ce9434", +  "e5cb09926aafed17e5bc375940a405aef0ce8a22047e64d5e327e21e67ce9434", -  EnvFrom: []v1.EnvFromSource{{SecretRef: s"&SecretEnvSource{LocalObjectRefe"...}}, -  Env: []v1.EnvVar{ -  FieldsType: "FieldsV1", -  FieldsType: "FieldsV1", -  FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`..., -  FieldsV1: s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`..., +  Generation: 0, -  Generation: 1, -  Generation: 2, -  Generation: 3, -  Generation: 4, -  Generation: 5, -  Generation: 6, -  Generation: 7, -  Generation: 8, -  Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", -  Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", -  ImagePullPolicy: "Always", +  "last-applied-secret": "469f642c04e6a7d068fa51ae785d66fe6ece1ed25a22927a26302c22a7b34dac", +  "last-applied-secret": "b8a029e65b796f96a332cdb88d930987a09e172d5574b49527cff10af58e869b", +  ManagedFields: nil, -  ManagedFields: []v1.ManagedFieldsEntry{ -  Manager: "kube-controller-manager", -  Manager: "percona-xtradb-cluster-operator", -  {Name: "IS_LOGCOLLECTOR", Value: "yes"}, -  {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"}, -  Name: 
"logrotate", -  Name: "logs", -  {Name: "MONITOR_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, -  {Name: "POD_NAMESPASE", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, -  {Name: "POD_NAME", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, -  {Name: "SERVICE_TYPE", Value: "mysql"}, +  ObservedGeneration: 0, -  ObservedGeneration: 1, -  ObservedGeneration: 2, -  ObservedGeneration: 3, -  ObservedGeneration: 4, -  ObservedGeneration: 5, -  ObservedGeneration: 6, -  ObservedGeneration: 7, -  ObservedGeneration: 8, -  Operation: "Update", -  Operation: "Update", -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNzRjZmY0MDAxNDM3ZmU2YTM0MWMyZTM5MGZhODE5NmFmNTNiNzc4ZWFiODMwMGU4OTExZWVkNmJkNGNiZWEyZiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNzRjZmY0MDAxNDM3ZmU2YTM0MWMyZTM5MGZhODE5NmFmNTNiNzc4ZWFiODMwMGU4OTExZWVkNmJkNGNiZWEyZiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiY2M4MDcwMThjYzE3OTkyMmIxNTNmNzY3MmJkNjNhNjliMTg3MmNiMTIxYTU5MmVmMjUzNzQ3ZDMyODZmZGVhMSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiY2M4MDcwMThjYzE3OTkyMmIxNTNmNzY3MmJkNjNhNjliMTg3MmNiMTIxYTU5MmVmMjUzNzQ3ZDMyODZmZGVhMSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYjhhMDI5ZTY1Yjc5NmY5NmEzMzJjZGI4OGQ5MzA5ODdhMDllMTcyZDU1NzRiNDk1MjdjZmYxMGFmNThlODY5YiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYTNlZTdlZDFlMzhkMDNiN2U2OGNiMDEyZWQwNzBmZWRkMjExYTAyOTJhYzkyMDgxNjVlMzNhYmQ0NWE0ZjIyZiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYTNlZTdlZDFlMzhkMDNiN2U2OGNiMDEyZWQwNzBmZWRkMjExYTAyOTJhYzkyMDgxNjVlMzNhYmQ0NWE0ZjIyZiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZGFjNmI2NmM1NzQxNTlhN2E4MDlhMDA2YTUyMThhMjAyOTM5OWMzMGM0NTg4OGU0MzhjNzI2YmE1OTgzNjY4NiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZTVjYjA5OTI2YWFmZWQxN2U1YmMzNzU5NDBhNDA1YWVmMGNlOGEyMjA0N2U2NGQ1ZTMyN2UyMWU2N2NlOTQzNCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZTVjYjA5OTI2YWFmZWQxN2U1YmMzNzU5NDBhNDA1YWVmMGNlOGEyMjA0N2U2NGQ1ZTMyN2UyMWU2N2NlOTQzNCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNDY5ZjY0MmMwNGU2YTdkMDY4ZmE1MWFlNzg1ZDY2ZmU2ZWNlMWVkMjVhMjI5MjdhMjYzMDJjMjJhN2IzNGRhYyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNDY5ZjY0MmMwNGU2YTdkMDY4ZmE1MWFlNzg1ZDY2ZmU2ZWNlMWVkMjVhMjI5MjdhMjYzMDJjMjJhN2IzNGRhYyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNzRjZmY0MDAxNDM3ZmU2YTM0MWMyZTM5MGZhODE5NmFmNTNiNzc4ZWFiODMwMGU4OTExZWVkNmJkNGNiZWEyZiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYTNlZTdlZDFlMzhkMDNiN2U2OGNiMDEyZWQwNzBmZWRkMjExYTAyOTJhYzkyMDgxNjVlMzNhYmQ0NWE0ZjIyZiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYTNlZTdlZDFlMzhkMDNiN2U2OGNiMDEyZWQwNzBmZWRkMjExYTAyOTJhYzkyMDgxNjVlMzNhYmQ0NWE0ZjIyZiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZGFjNmI2NmM1NzQxNTlhN2E4MDlhMDA2YTUyMThhMjAyOTM5OWMzMGM0NTg4OGU0MzhjNzI2YmE1OTgzNjY4NiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZGFjNmI2NmM1NzQxNTlhN2E4MDlhMDA2YTUyMThhMjAyOTM5OWMzMGM0NTg4OGU0MzhjNzI2YmE1OTgzNjY4NiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZGFjNmI2NmM1NzQxNTlhN2E4MDlhMDA2YTUyMThhMjAyOTM5OWMzMGM0NTg4OGU0MzhjNzI2YmE1OTgzNjY4NiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMTI0LWY0OGNjMzBiIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJsb2dzIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1sb2djb2xsZWN0b3IiLCJlbnZGcm9tIjpbeyJzZWNyZXRSZWYiOnsibmFtZSI6InNvbWUtbmFtZS1sb2ctY29sbGVjdG9yIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IkxPR19EQVRBX0RJUiIsInZhbHVlIjoiL3Zhci9saWIvbXlzcWwifSx7Im5hbWUiOiJQT0RfTkFNRVNQQVNFIiwidmFsdWVGcm9tIjp7ImZpZWxkUmVmIjp7ImZpZWxkUGF0aCI6Im1ldGFkYXRhLm5hbWVzcGFjZSJ9fX0seyJuYW1lIjoiUE9EX05BTUUiLCJ2YWx1ZUZyb20iOnsiZmllbGRSZWYiOnsiZmllbGRQYXRoIjoibWV0YWRhdGEubmFtZSJ9fX1dLCJyZXNvdXJjZXMiOnt9LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJkYXRhZGlyIiwibW91bnRQYXRoIjoiL3Zhci9saWIvbXlzcWwifV0sImltYWdlUHVsbFBvbGljeSI6IkFsd2F5cyJ9LHsibmFtZSI6ImxvZ3JvdGF0ZSIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOm1haW4tbG9nY29sbGVjdG9yIiwiYXJncyI6WyJsb2dyb3RhdGUiXSwiZW52IjpbeyJuYW1lIjoiU0VSVklDRV9UWVBFIiwidmFsdWUiOiJteXNxbCJ9LHsibmFtZSI6Ik1PTklUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJtb25pdG9yIn19fV0sInJlc291cmNlcyI6e30sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImRhdGFkaXIiLCJtb3VudF
BhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHhjIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1weGM4LjAiLCJjb21tYW5kIjpbIi92YXIvbGliL215c3FsL3B4Yy1lbnRyeXBvaW50LnNoIl0sImFyZ3MiOlsibXlzcWxkIl0sInBvcnRzIjpbeyJuYW1lIjoibXlzcWwiLCJjb250YWluZXJQb3J0IjozMzA2fSx7Im5hbWUiOiJzc3QiLCJjb250YWluZXJQb3J0Ijo0NDQ0fSx7Im5hbWUiOiJ3cml0ZS1zZXQiLCJjb250YWluZXJQb3J0Ijo0NTY3fSx7Im5hbWUiOiJpc3QiLCJjb250YWluZXJQb3J0Ijo0NTY4fSx7Im5hbWUiOiJteXNxbC1hZG1pbiIsImNvbnRhaW5lclBvcnQiOjMzMDYyfSx7Im5h"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZGFjNmI2NmM1NzQxNTlhN2E4MDlhMDA2YTUyMThhMjAyOTM5OWMzMGM0NTg4OGU0MzhjNzI2YmE1OTgzNjY4NiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMTI0LWY0OGNjMzBiIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXB4YzguMCIsImNvbW1hbmQiOlsiL3Zhci9saWIvbXlzcWwvcHhjLWVudHJ5cG9pbnQuc2giXSwiYXJncyI6WyJteXNxbGQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InNzdCIsImNvbnRhaW5lclBvcnQiOjQ0NDR9LHsibmFtZSI6IndyaXRlLXNldCIsImNvbnRhaW5lclBvcnQiOjQ1Njd9LHsibmFtZSI6ImlzdCIsImNvbnRhaW5lclBvcnQiOjQ1Njh9LHsibmFtZSI6Im15c3FsLWFkbWluIiwiY29udGFpbmVyUG9ydCI6MzMwNjJ9LHsibmFtZSI6Im15c3FseCIsImN
vbnRhaW5lclBvcnQiOjMzMDYwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXB4YyIsIm9wdGlvbmFsIjp0cnVlfX1dLCJlbnYiOlt7Im5hbWUiOiJQWENfU0VSVklDRSIsInZhbHVlIjoic29tZS1uYW1lLXB4Yy11bnJlYWR5In0seyJuYW1lIjoiTU9OSVRPUl9IT1NUIiwidmFsdWUiOiIlIn0seyJuYW1lIjoiTVlTUUxfUk9PVF9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6InJvb3QifX19LHsibmFtZSI6IlhUUkFCQUNLVVBfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJ4dHJhYmFja3VwIn19fSx7Im5hbWUiOiJNT05JVE9SX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoibW9uaXRvciJ9fX0seyJuYW1lIjoiQ0xVU1RFUl9IQVNIIiwidmFsdWUiOiIxMDg1MTQ1In0seyJuYW1lIjoiT1BFUkFUT1JfQURNSU5fUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiTElWRU5FU1NfQ0hFQ0tfVElNRU9VVCIsInZhbHVlIjoiNSJ9LHsibmFtZSI6IlJFQURJTkVTU19DSEVDS19USU1FT1VUIiwidmFsdWUiOiIxNSJ9LHsibmFtZSI6IkRFRkFVTFRfQVVUSEVOVElDQVRJT05fUExVR0lOIiwidmFsdWUiOiJjYWNoaW5nX3NoYTJfcGFzc3dvcmQifSx7Im5hbWUiOiJNWVNR"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNv"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYjhhMDI5ZTY1Yjc5NmY5NmEzMzJjZGI4OGQ5MzA5ODdhMDllMTcyZDU1NzRiNDk1MjdjZmYxMGFmNThlODY5YiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMw"..., +  PeriodSeconds: 0, -  PeriodSeconds: 10, +  PersistentVolumeClaimRetentionPolicy: nil, -  PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", +  Phase: "", -  Phase: "Pending", +  PodManagementPolicy: "", -  PodManagementPolicy: "OrderedReady", +  Protocol: "", -  Protocol: "TCP", +  ReadyReplicas: 0, -  ReadyReplicas: 2, -  ReadyReplicas: 3, +  Replicas: 0, -  Replicas: 2, -  Replicas: &2, +  Replicas: &2, -  Replicas: 3, -  Replicas: &3, +  Replicas: &3, +  ResourceVersion: "", -  ResourceVersion: "1764010368937439000", -  ResourceVersion: "1764010546364239007", -  ResourceVersion: "1764010665826863000", -  ResourceVersion: "1764010695094623000", -  ResourceVersion: "1764010713405743000", -  ResourceVersion: "1764010860643215007", -  ResourceVersion: "1764010909432367000", -  ResourceVersion: "1764010991625055000", -  ResourceVersion: "1764011042631327000", -  ResourceVersion: "1764011176680319007", -  ResourceVersion: "1764011204878991000", -  ResourceVersion: "1764011315574975007", -  ResourceVersion: "1764011503112831007", +  RestartPolicy: "", -  RestartPolicy: "Always", -  RevisionHistoryLimit: &10, +  RevisionHistoryLimit: nil, +  SchedulerName: "", -  SchedulerName: "default-scheduler", -  Subresource: "status", +  TerminationMessagePath: "", -  TerminationMessagePath: "/dev/termination-log", +  TerminationMessagePolicy: "", -  
TerminationMessagePolicy: "File", -  Time: s"2025-11-24 18:52:09 +0000 UTC", -  Time: s"2025-11-24 18:52:10 +0000 UTC", -  Time: s"2025-11-24 18:52:48 +0000 UTC", -  Time: s"2025-11-24 18:55:46 +0000 UTC", -  Time: s"2025-11-24 18:57:27 +0000 UTC", -  Time: s"2025-11-24 18:57:45 +0000 UTC", -  Time: s"2025-11-24 18:57:52 +0000 UTC", -  Time: s"2025-11-24 18:58:15 +0000 UTC", -  Time: s"2025-11-24 18:58:32 +0000 UTC", -  Time: s"2025-11-24 18:58:33 +0000 UTC", -  Time: s"2025-11-24 19:01:00 +0000 UTC", -  Time: s"2025-11-24 19:01:13 +0000 UTC", -  Time: s"2025-11-24 19:01:49 +0000 UTC", -  Time: s"2025-11-24 19:02:49 +0000 UTC", -  Time: s"2025-11-24 19:03:11 +0000 UTC", -  Time: s"2025-11-24 19:03:42 +0000 UTC", -  Time: s"2025-11-24 19:04:02 +0000 UTC", -  Time: s"2025-11-24 19:06:16 +0000 UTC", -  Time: s"2025-11-24 19:06:24 +0000 UTC", -  Time: s"2025-11-24 19:06:44 +0000 UTC", -  Time: s"2025-11-24 19:08:19 +0000 UTC", -  Time: s"2025-11-24 19:08:35 +0000 UTC", -  Time: s"2025-11-24 19:08:37 +0000 UTC", -  Time: s"2025-11-24 19:11:43 +0000 UTC", -  TopologySpreadConstraints: nil, +  TopologySpreadConstraints: []v1.TopologySpreadConstraint{}, +  UID: "", -  UID: "3ebf9d79-994c-49e7-a70a-989e87bf7e7d", -  UID: "d17dea34-2413-4816-a632-21299f3ac4e0", +  UpdatedReplicas: 0, -  UpdatedReplicas: 1, -  UpdatedReplicas: 2, -  UpdatedReplicas: 3, +  UpdateRevision: "", -  UpdateRevision: "some-name-proxysql-5987694b78", -  UpdateRevision: "some-name-proxysql-59fd65b7dd", -  UpdateRevision: "some-name-proxysql-5dc68449c7", -  UpdateRevision: "some-name-proxysql-66c799db87", -  UpdateRevision: "some-name-proxysql-7d78ff9475", -  UpdateRevision: "some-name-proxysql-f8cc4dcfb", -  UpdateRevision: "some-name-pxc-59c6bdb746", -  UpdateRevision: "some-name-pxc-645869ccf4", -  UpdateRevision: "some-name-pxc-67c46b66ff", -  UpdateRevision: "some-name-pxc-7d4bb5f7d4", -  UpdateRevision: "some-name-pxc-8856f89b9", +  Value: "caching_sha2_password", -  Value: "mysql_native_password", -  VolumeMode: &"Filesystem", +  VolumeMode: nil, -  VolumeMounts: []v1.VolumeMount{{Name: "datadir", MountPath: "/var/lib/mysql"}},   }    },    },    {    },    },    {    },    }, ""),    },    {    },    },    },    ... // 16 identical fields    ... // 16 identical fields    ... // 22 identical fields    "2f",    ... // 2 identical fields    ... // 2 identical fields    ... // 2 identical fields    ... // 3 identical elements    ... // 3 identical fields    ... // 3 identical fields    ... // 3 identical fields    ... // 4 identical fields    ... // 5 identical fields    ... // 5 identical fields    ... // 5 identical fields    ... // 6 identical fields    ... // 6 identical fields    ... // 7 identical fields    ... // 8 identical fields    ... // 9 identical fields    ... 
// 9 identical fields    AccessModes: nil,    ActiveDeadlineSeconds: nil,    Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},    Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},    Annotations: map[string]string{    Args: {"mysqld"},    Args: {"proxysql", "-f", "-c", "/etc/proxysql/proxysql.cnf", ...},    AutomountServiceAccountToken: nil,    AWSElasticBlockStore: nil,    AzureFile: nil,    Capacity: nil,    Conditions: nil,    ConfigMapKeyRef: nil,    ConfigMap: &v1.ConfigMapVolumeSource{    ContainerPort: 3306,    ContainerPort: 33060,    ContainerPort: 33062,    ContainerPort: 4444,    ContainerPort: 4567,    ContainerPort: 4568,    ContainerPort: 6032,    ContainerPort: 6070,    Containers: []v1.Container{    DataSource: nil,    DataSourceRef: nil,    DeletionGracePeriodSeconds: nil,    DeletionGracePeriodSeconds: nil,    DeletionTimestamp: nil,    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-proxysql"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-pxc"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-log-collector"}, Optional: &true}}},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "OPERATOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "proxyadmin"}}}, ...},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc-unready"}, {Name: "MONITOR_HOST", Value: "%"}, {Name: "MYSQL_ROOT_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "root"}}}, {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, ...},    Env: []v1.EnvVar{    EphemeralContainers: nil,    FailureThreshold: 3,    FC: nil,    FieldPath: "metadata.name",    FieldPath: "metadata.namespace",    FieldRef: &v1.ObjectFieldSelector{    Finalizers: nil,    Finalizers: nil,    GitRepo: nil,    HostAliases: nil,    HostIP: "",    HostPort: 0,    ImagePullPolicy: "Always",    InitContainers: []v1.Container{    InitialDelaySeconds: 300,    ISCSI: nil,    Items: nil,    Items: nil,    "kubectl.kubernetes.io/default-container": "proxysql",    "kubectl.kubernetes.io/default-container": "pxc",    Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},    Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},    Labels: nil, 
   "last-applied-secret": strings.Join({    Lifecycle: nil,    LivenessProbe: &v1.Probe{    LocalObjectReference: {Name: "auto-some-name-pxc"},    LocalObjectReference: {Name: "some-name-pxc"},    ManagedFields: nil,    MinReadySeconds: 0,    Name: "auto-config",    {Name: "bin", VolumeSource: {EmptyDir: &{}}},    {Name: "CLUSTER_HASH", Value: "1085145"},    Name: "config",    Name: "DEFAULT_AUTHENTICATION_PLUGIN",    Name: "ist",    {Name: "LIVENESS_CHECK_TIMEOUT", Value: "5"},    {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"},    {Name: "MONITOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "monitor"}}},    Name: "mysql",    Name: "mysql-admin",    Name: "mysql-init-file",    {Name: "MYSQL_NOTIFY_SOCKET", Value: "/var/lib/mysql/notify.sock"},    {Name: "MYSQL_STATE_FILE", Value: "/var/lib/mysql/mysql.state"},    Name: "mysql-users-secret-file",    Name: "mysqlx",    {Name: "OPERATOR_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}},    Name: "POD_NAME",    Name: "POD_NAMESPASE",    Name: "proxyadm",    {Name: "READINESS_CHECK_TIMEOUT", Value: "15"},    Namespace: "users-28990",    Name: "ssl",    Name: "ssl-internal",    Name: "sst",    Name: "stats",    {Name: "tmp", VolumeSource: {EmptyDir: &{}}},    Name: "vault-keyring-secret",    Name: "write-set",    {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}},    NFS: nil,    NodeName: "",    NodeSelector: nil,    ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "last-applied-secret": "b8a029e65b796f96a332cdb88d930987a09e172d5574b49527cff10af58e869b", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}},    ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}},    ObjectMeta: {Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "pxc", "last-applied-secret": "dac6b66c574159a7a809a006a5218a2029399c30c45888e438c726ba59836686", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}},    ObjectMeta: {Name: "datadir", Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: {Name: "proxydata", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", 
"app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: v1.ObjectMeta{    ObjectMeta: v1.ObjectMeta{    Optional: &false,    Optional: &true,    Optional: &true,    Ordinals: nil,    OS: nil,    Overhead: nil,    OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name", UID: "1b60cc24-5172-4cc4-9fa5-e4739b3b5a26", ...}},    OwnerReferences: nil,    "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e",    "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e",    "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a",    "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a",    "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc",    "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc",    Ports: nil,    Ports: []v1.ContainerPort{    PreemptionPolicy: nil,    ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}},    Quobyte: nil,    ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...},    Replicas: &2,    Replicas: &3,    ResizePolicy: nil,    ResourceFieldRef: nil,    Resources: {},    Resources: {Limits: {s"cpu": {i: {...}, s: "1", Format: "DecimalSI"}, s"memory": {i: {...}, s: "2G", Format: "DecimalSI"}}, Requests: {s"cpu": {i: {...}, s: "100m", Format: "DecimalSI"}, s"memory": {i: {...}, Format: "DecimalSI"}}},    SecretName: "internal-some-name",    SecretName: "some-name-mysql-init",    SecretName: "some-name-ssl",    SecretName: "some-name-ssl-internal",    SecretName: "some-name-vault",    Secret: &v1.SecretVolumeSource{    SecurityContext: nil,    Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    SelfLink: "",    ServiceAccountName: "default",    ServiceName: "some-name-proxysql-unready",    ServiceName: "some-name-pxc",    SetHostnameAsFQDN: nil,    Spec: v1.PersistentVolumeClaimSpec{    Spec: v1.PodSpec{    Spec: v1.StatefulSetSpec{    StartupProbe: nil,    Status: v1.PersistentVolumeClaimStatus{    Status: v1.StatefulSetStatus{    StorageClassName: nil,    Subdomain: "",    SuccessThreshold: 1,    Template: v1.PodTemplateSpec{    TerminationGracePeriodSeconds: &30,    TerminationGracePeriodSeconds: &600,    TerminationGracePeriodSeconds: nil,    TimeoutSeconds: 5,    Tolerations: nil,    TypeMeta: {},    TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"},    UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}},   &v1.StatefulSet{    Value: "",    ValueFrom: nil,    ValueFrom: &v1.EnvVarSource{    VolumeAttributesClassName: nil,    VolumeClaimTemplates: []v1.PersistentVolumeClaim{    VolumeDevices: nil,    VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...},    VolumeName: "",    VolumeSource: v1.VolumeSource{    Volumes: []v1.Volume{    VsphereVolume: nil,    
WorkingDir: "", + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE + kubectl patch pxc -n users-28990 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.2FWRgIeTE3 ++ mktemp + local LAST_ERR=/tmp/tmp.7mfCme5NtX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2FWRgIeTE3 perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-28990 namespace + cat /tmp/tmp.7mfCme5NtX + rm /tmp/tmp.2FWRgIeTE3 /tmp/tmp.7mfCme5NtX + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.7xpzl8bqju ++ mktemp + local LAST_ERR=/tmp/tmp.ciidvuK3qa + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7xpzl8bqju No resources found + cat /tmp/tmp.ciidvuK3qa + rm /tmp/tmp.7xpzl8bqju /tmp/tmp.ciidvuK3qa + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.gifOlMidIL ++ mktemp + local LAST_ERR=/tmp/tmp.pnBIa1IVWg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.gifOlMidIL No resources found + cat /tmp/tmp.pnBIa1IVWg + rm /tmp/tmp.gifOlMidIL /tmp/tmp.pnBIa1IVWg + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.0YEtvIlMlI ++ mktemp + local LAST_ERR=/tmp/tmp.KkYUVLk0Fd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0YEtvIlMlI validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.KkYUVLk0Fd + rm /tmp/tmp.0YEtvIlMlI /tmp/tmp.KkYUVLk0Fd + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-28990 + rm -rf /tmp/tmp.30P5AqZU1r + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.Q12lU9r9lc + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.olhzdAcA40 ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.ZKo8hkTVDx + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.pjDkJOxMNw + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace users-28990 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator