Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/logs/users-scheduler-8-4.log Warning: version difference between client (1.35) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.35) and server (1.31) exceeds the supported minor version skew of +/-1 + create_infra users-scheduler-16507 + local ns=users-scheduler-16507 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-scheduler-26323 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.7opIKi7O80 ++ mktemp + local LAST_ERR=/tmp/tmp.Sv7TA1mpsD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7opIKi7O80 perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-scheduler-26323 namespace + cat /tmp/tmp.Sv7TA1mpsD + rm /tmp/tmp.7opIKi7O80 /tmp/tmp.Sv7TA1mpsD + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.G9zjbhe42P ++ mktemp + local LAST_ERR=/tmp/tmp.AU5UWQo4Zi + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.G9zjbhe42P No resources found + cat /tmp/tmp.AU5UWQo4Zi + rm /tmp/tmp.G9zjbhe42P /tmp/tmp.AU5UWQo4Zi + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.J1iq4WvYh1 ++ mktemp + local LAST_ERR=/tmp/tmp.i9gMh89PXn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.J1iq4WvYh1 No resources found + cat /tmp/tmp.i9gMh89PXn + rm /tmp/tmp.J1iq4WvYh1 /tmp/tmp.i9gMh89PXn + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ awk '{print $1}' ++ grep chaos-mesh.org + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl 
delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' ++ mktemp + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator + xargs kubectl delete ns + awk '{print$1}' + local LAST_OUT=/tmp/tmp.pX3B61k45e ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.EUkGYhX2ag ++ mktemp + local LAST_ERR=/tmp/tmp.YcdHGPenQt + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + local LAST_ERR=/tmp/tmp.sPuKY6hpCq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.pX3B61k45e + cat /tmp/tmp.YcdHGPenQt + rm /tmp/tmp.pX3B61k45e /tmp/tmp.YcdHGPenQt + return 0 namespace "users-scheduler-26323" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.EUkGYhX2ag namespace "pxc-operator" deleted + cat /tmp/tmp.sPuKY6hpCq + rm /tmp/tmp.EUkGYhX2ag /tmp/tmp.sPuKY6hpCq + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'waiting for namespace/pxc-operator to be deleted' waiting for namespace/pxc-operator to be deleted+ set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.dsl3x6CcgO ++ mktemp + local LAST_ERR=/tmp/tmp.k8KRMR8eq4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dsl3x6CcgO namespace/pxc-operator created + cat /tmp/tmp.k8KRMR8eq4 + rm /tmp/tmp.dsl3x6CcgO /tmp/tmp.k8KRMR8eq4 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.R2G6KmzkuO +++ mktemp ++ local LAST_ERR=/tmp/tmp.BtDwue4upE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.R2G6KmzkuO ++ cat /tmp/tmp.BtDwue4upE ++ rm /tmp/tmp.R2G6KmzkuO /tmp/tmp.BtDwue4upE ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2330-8293f071-6-cluster9 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.iHfn1s4e4H ++ mktemp + local LAST_ERR=/tmp/tmp.NN3oprNOsk + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2330-8293f071-6-cluster9 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.iHfn1s4e4H Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2330-8293f071-6-cluster9" modified. 
+ cat /tmp/tmp.NN3oprNOsk + rm /tmp/tmp.iHfn1s4e4H /tmp/tmp.NN3oprNOsk + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.PDXCRrPHr6 ++ mktemp + local LAST_ERR=/tmp/tmp.ixee33oLh4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PDXCRrPHr6 customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.ixee33oLh4 + rm /tmp/tmp.PDXCRrPHr6 /tmp/tmp.ixee33oLh4 + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.7uYzxs5pTO ++ mktemp + local LAST_ERR=/tmp/tmp.HwRjxfppGp + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7uYzxs5pTO clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.HwRjxfppGp + rm /tmp/tmp.7uYzxs5pTO /tmp/tmp.HwRjxfppGp + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/deploy/cw-operator.yaml + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2330-8293f071^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + kubectl_bin apply -f - ++ mktemp + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "PXCO_FEATURE_GATES").value) = ""' - + local LAST_OUT=/tmp/tmp.o0uiL6IrIq + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - ++ mktemp + local LAST_ERR=/tmp/tmp.0K7N5OFw9I + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.o0uiL6IrIq deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.0K7N5OFw9I + rm /tmp/tmp.o0uiL6IrIq /tmp/tmp.0K7N5OFw9I + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + 
local LAST_OUT=/tmp/tmp.HvxHOoHpUa ++ mktemp + local LAST_ERR=/tmp/tmp.elHNA7okKt + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HvxHOoHpUa pod/percona-xtradb-cluster-operator-65b6f89d5d-8skh7 condition met + cat /tmp/tmp.elHNA7okKt + rm /tmp/tmp.HvxHOoHpUa /tmp/tmp.elHNA7okKt + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ jq -r '.items[] | select(.metadata.deletionTimestamp == null) | .metadata.name' ++ head -1 ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.xp0iuruyHC +++ mktemp ++ local LAST_ERR=/tmp/tmp.yerrELZGDT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xp0iuruyHC ++ cat /tmp/tmp.yerrELZGDT ++ rm /tmp/tmp.xp0iuruyHC /tmp/tmp.yerrELZGDT ++ return 0 + wait_pod percona-xtradb-cluster-operator-65b6f89d5d-8skh7 480 pxc-operator + local pod=percona-xtradb-cluster-operator-65b6f89d5d-8skh7 + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-65b6f89d5d-8skh7 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-65b6f89d5d-8skh7 condition met waiting for pod/percona-xtradb-cluster-operator-65b6f89d5d-8skh7 to become Ready.Ok + sleep 3 + create_namespace users-scheduler-16507 + local namespace=users-scheduler-16507 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get ValidatingWebhookConfiguration + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ grep validate-auth ++ kubectl get ValidatingWebhookConfiguration + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were 
provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces users-scheduler-16507' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-scheduler-16507 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-scheduler-16507 + kubectl_bin get ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.3ZUQrU8aGM + local LAST_OUT=/tmp/tmp.OEqgPv0gZf ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.wD0DOAIxsL + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.CJSzxlDdka + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-scheduler-16507 + xargs kubectl delete ns + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.OEqgPv0gZf + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-scheduler-16507 + cat /tmp/tmp.CJSzxlDdka + rm /tmp/tmp.OEqgPv0gZf /tmp/tmp.CJSzxlDdka + return 0 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-scheduler-16507 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.3ZUQrU8aGM + cat /tmp/tmp.wD0DOAIxsL Error from server (NotFound): namespaces "users-scheduler-16507" not found + rm /tmp/tmp.3ZUQrU8aGM /tmp/tmp.wD0DOAIxsL + return 1 + : + wait_for_delete namespace/users-scheduler-16507 + local res=namespace/users-scheduler-16507 + echo -n 'waiting for namespace/users-scheduler-16507 to be deleted' waiting for namespace/users-scheduler-16507 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "users-scheduler-16507" not found + desc 'create namespace users-scheduler-16507' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-scheduler-16507 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-scheduler-16507 ++ mktemp + local LAST_OUT=/tmp/tmp.5pCmHI5Li7 ++ mktemp + local LAST_ERR=/tmp/tmp.xutaYbayqC + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-scheduler-16507 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5pCmHI5Li7 namespace/users-scheduler-16507 created + cat /tmp/tmp.xutaYbayqC + rm /tmp/tmp.5pCmHI5Li7 /tmp/tmp.xutaYbayqC + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.SIDpgnNUvz +++ mktemp ++ local LAST_ERR=/tmp/tmp.gExWWcw5lv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SIDpgnNUvz ++ cat /tmp/tmp.gExWWcw5lv ++ rm /tmp/tmp.SIDpgnNUvz /tmp/tmp.gExWWcw5lv ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2330-8293f071-6-cluster9 
--namespace=users-scheduler-16507 ++ mktemp + local LAST_OUT=/tmp/tmp.G4zq7Mkklc ++ mktemp + local LAST_ERR=/tmp/tmp.be9EVHKEBR + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2330-8293f071-6-cluster9 --namespace=users-scheduler-16507 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.G4zq7Mkklc Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2330-8293f071-6-cluster9" modified. + cat /tmp/tmp.be9EVHKEBR + rm /tmp/tmp.G4zq7Mkklc /tmp/tmp.be9EVHKEBR + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.igsrHo6NFm ++ mktemp + local LAST_ERR=/tmp/tmp.I9T1pYqcrz + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.igsrHo6NFm secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.I9T1pYqcrz + rm /tmp/tmp.igsrHo6NFm /tmp/tmp.I9T1pYqcrz + return 0 + desc 'create PXC cluster with 1-password secret' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster with 1-password secret ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/conf/some-name.yml '' '' /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/secrets_one_pass.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/secrets_one_pass.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/secrets_one_pass.yml ++ mktemp + local LAST_OUT=/tmp/tmp.YNOlsU3SPM ++ mktemp + local LAST_ERR=/tmp/tmp.pvCUXgkdIr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/secrets_one_pass.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.YNOlsU3SPM secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.pvCUXgkdIr + rm /tmp/tmp.YNOlsU3SPM /tmp/tmp.pvCUXgkdIr + return 0 + 
apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/client.yml + local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/client.yml + local pvc_name= + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/client.yml '' + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/client.yml + local pvc_name= + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.4#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.4-backup#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-scheduler-16507~ + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.4#' + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2330-8293f071#' + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + local LAST_OUT=/tmp/tmp.Aapv5rTbGD ++ mktemp + local LAST_ERR=/tmp/tmp.bYzouT56fN + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Aapv5rTbGD deployment.apps/pxc-client created + cat /tmp/tmp.bYzouT56fN + rm /tmp/tmp.Aapv5rTbGD /tmp/tmp.bYzouT56fN + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/conf/some-name.yml + local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/conf/some-name.yml + local pvc_name= + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/conf/some-name.yml '' + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/conf/some-name.yml + local pvc_name= + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.4#' + local LAST_OUT=/tmp/tmp.rhIR5LDDsN + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.4#' ++ mktemp + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2330-8293f071#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.4-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + 
/usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' + local LAST_ERR=/tmp/tmp.mP69NlQKF8 + local exit_status=0 + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-scheduler-16507~ + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rhIR5LDDsN perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.mP69NlQKF8 + rm /tmp/tmp.rhIR5LDDsN /tmp/tmp.mP69NlQKF8 + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.CgkKLUjSAq ++++ mktemp +++ local LAST_ERR=/tmp/tmp.nwEpUiiycT +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.CgkKLUjSAq +++ cat /tmp/tmp.nwEpUiiycT +++ rm /tmp/tmp.CgkKLUjSAq /tmp/tmp.nwEpUiiycT +++ return 0 ++ [[ false == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.Unr0xNOH8K ++++ mktemp +++ local LAST_ERR=/tmp/tmp.9NCI9Rm0G0 +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.Unr0xNOH8K +++ cat /tmp/tmp.9NCI9Rm0G0 +++ rm /tmp/tmp.Unr0xNOH8K /tmp/tmp.9NCI9Rm0G0 +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-scheduler-16507 ++ mktemp + local LAST_OUT=/tmp/tmp.RDY1GwkYEf ++ mktemp + local LAST_ERR=/tmp/tmp.nEzexnecdb + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-scheduler-16507 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-scheduler-16507 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-scheduler-16507 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.RDY1GwkYEf + cat /tmp/tmp.nEzexnecdb error: no matching resources found + rm /tmp/tmp.RDY1GwkYEf /tmp/tmp.nEzexnecdb + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for 
running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.l7HB1jnq1o +++ mktemp ++ local LAST_ERR=/tmp/tmp.yvVxuSeVHS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.l7HB1jnq1o ++ cat /tmp/tmp.yvVxuSeVHS ++ rm /tmp/tmp.l7HB1jnq1o /tmp/tmp.yvVxuSeVHS ++ return 0 + local 'root_pass=$PG_ZP+VIMNesDj=1y' + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''$PG_ZP+VIMNesDj=1y'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -p'\''$PG_ZP+VIMNesDj=1y'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 
'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sRdFRbaG7q +++ mktemp ++ local LAST_ERR=/tmp/tmp.f06gR57ybb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sRdFRbaG7q ++ cat /tmp/tmp.f06gR57ybb ++ rm /tmp/tmp.sRdFRbaG7q /tmp/tmp.f06gR57ybb ++ return 0 + client_pod=pxc-client-6bbff654db-bvlfn + wait_pod pxc-client-6bbff654db-bvlfn + local pod=pxc-client-6bbff654db-bvlfn + local max_retry=480 + local ns= ++ echo pxc-client-6bbff654db-bvlfn ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6bbff654db-bvlfn condition met waiting for pod/pxc-client-6bbff654db-bvlfn to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''$PG_ZP+VIMNesDj=1y'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -p'\''$PG_ZP+VIMNesDj=1y'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6ZgDvrVGFQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.KUW6rvSyQs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6ZgDvrVGFQ ++ cat /tmp/tmp.KUW6rvSyQs ++ rm /tmp/tmp.6ZgDvrVGFQ /tmp/tmp.KUW6rvSyQs ++ return 0 + client_pod=pxc-client-6bbff654db-bvlfn + wait_pod pxc-client-6bbff654db-bvlfn + local pod=pxc-client-6bbff654db-bvlfn + local max_retry=480 + local ns= ++ echo pxc-client-6bbff654db-bvlfn ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6bbff654db-bvlfn condition met waiting for pod/pxc-client-6bbff654db-bvlfn to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''$PG_ZP+VIMNesDj=1y'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''$PG_ZP+VIMNesDj=1y'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.4 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-1-84.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''$PG_ZP+VIMNesDj=1y'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''$PG_ZP+VIMNesDj=1y'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Y93uyjO0xk +++ mktemp ++ local LAST_ERR=/tmp/tmp.nuu4PfJnIZ ++ local exit_status=0 +++ seq 0 2 ++ for i in 
'$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Y93uyjO0xk ++ cat /tmp/tmp.nuu4PfJnIZ ++ rm /tmp/tmp.Y93uyjO0xk /tmp/tmp.nuu4PfJnIZ ++ return 0 + client_pod=pxc-client-6bbff654db-bvlfn + wait_pod pxc-client-6bbff654db-bvlfn + local pod=pxc-client-6bbff654db-bvlfn + local max_retry=480 + local ns= ++ echo pxc-client-6bbff654db-bvlfn ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6bbff654db-bvlfn condition met waiting for pod/pxc-client-6bbff654db-bvlfn to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.KUxcwQMinG/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.KUxcwQMinG/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-1.sql /tmp/tmp.KUxcwQMinG/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''$PG_ZP+VIMNesDj=1y'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''$PG_ZP+VIMNesDj=1y'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.4 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-1-84.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''$PG_ZP+VIMNesDj=1y'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''$PG_ZP+VIMNesDj=1y'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fjdNb3Kz3z +++ mktemp ++ local LAST_ERR=/tmp/tmp.aJrrBEOh6S ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fjdNb3Kz3z ++ cat /tmp/tmp.aJrrBEOh6S ++ rm /tmp/tmp.fjdNb3Kz3z /tmp/tmp.aJrrBEOh6S ++ return 0 + client_pod=pxc-client-6bbff654db-bvlfn + wait_pod pxc-client-6bbff654db-bvlfn + local pod=pxc-client-6bbff654db-bvlfn + local max_retry=480 + local ns= ++ echo pxc-client-6bbff654db-bvlfn ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6bbff654db-bvlfn condition met waiting for pod/pxc-client-6bbff654db-bvlfn to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.KUxcwQMinG/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.KUxcwQMinG/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-1.sql /tmp/tmp.KUxcwQMinG/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''$PG_ZP+VIMNesDj=1y'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''$PG_ZP+VIMNesDj=1y'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.4 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-1-84.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''$PG_ZP+VIMNesDj=1y'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''$PG_ZP+VIMNesDj=1y'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.A6YPOR1R4L +++ mktemp ++ local LAST_ERR=/tmp/tmp.wfLaeOX8Hi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.A6YPOR1R4L ++ cat /tmp/tmp.wfLaeOX8Hi ++ rm /tmp/tmp.A6YPOR1R4L /tmp/tmp.wfLaeOX8Hi ++ return 0 + client_pod=pxc-client-6bbff654db-bvlfn + wait_pod pxc-client-6bbff654db-bvlfn + local pod=pxc-client-6bbff654db-bvlfn + local max_retry=480 + local ns= ++ echo pxc-client-6bbff654db-bvlfn ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6bbff654db-bvlfn condition met waiting for pod/pxc-client-6bbff654db-bvlfn to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.KUxcwQMinG/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.KUxcwQMinG/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-1.sql /tmp/tmp.KUxcwQMinG/select-1.sql + is_keyring_plugin_in_use some-name + local cluster=some-name + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.4 ]] + kubectl exec -it some-name-pxc-0 -c pxc -- ls /var/lib/mysql/mysqld.my + return 1 ++ kubectl exec -it some-name-proxysql-0 -- sh -c 'proxysql --version 2>/dev/null' ++ awk '{print $3}' ++ cut -d. 
-f1 Unable to use a TTY - input is not a terminal or the right kind of file + PROXYSQL_VER=2 + tables_cmp_file=select-2-proxysql3 + [[ 2 == 2 ]] + tables_cmp_file=select-2 + desc 'test missing passwords were created and present in internal secrets' + set +o xtrace ----------------------------------------------------------------------------------- test missing passwords were created and present in internal secrets ----------------------------------------------------------------------------------- + empty_pwds=() + wrong_pwds=() + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking root' Checking root ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.69fzZuS9fe +++ mktemp ++ local LAST_ERR=/tmp/tmp.4z9A9zRJ7J ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.69fzZuS9fe ++ cat /tmp/tmp.4z9A9zRJ7J ++ rm /tmp/tmp.69fzZuS9fe /tmp/tmp.4z9A9zRJ7J ++ return 0 + secret_pass='$PG_ZP+VIMNesDj=1y' ++ getSecretData internal-some-name root ++ local secretName=internal-some-name ++ local dataKey=root ++ kubectl_bin get secrets/internal-some-name '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.p9aIscPAB9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.eyFwb2f0tP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.p9aIscPAB9 ++ cat /tmp/tmp.eyFwb2f0tP ++ rm /tmp/tmp.p9aIscPAB9 /tmp/tmp.eyFwb2f0tP ++ return 0 + int_secret_pass='$PG_ZP+VIMNesDj=1y' + [[ -z $PG_ZP+VIMNesDj=1y ]] + [[ $PG_ZP+VIMNesDj=1y != \$\P\G\_\Z\P\+\V\I\M\N\e\s\D\j\=\1\y ]] + [[ root != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ root ]] + [[ '' =~ root ]] + echo 'Running compare for root' Running compare for root + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''$PG_ZP+VIMNesDj=1y'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''$PG_ZP+VIMNesDj=1y'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.4 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''$PG_ZP+VIMNesDj=1y'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''$PG_ZP+VIMNesDj=1y'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Daq8cVNsFF +++ mktemp ++ local LAST_ERR=/tmp/tmp.gh1xqgf6gK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Daq8cVNsFF ++ cat /tmp/tmp.gh1xqgf6gK ++ rm /tmp/tmp.Daq8cVNsFF /tmp/tmp.gh1xqgf6gK ++ return 0 + 
client_pod=pxc-client-6bbff654db-bvlfn + wait_pod pxc-client-6bbff654db-bvlfn + local pod=pxc-client-6bbff654db-bvlfn + local max_retry=480 + local ns= ++ echo pxc-client-6bbff654db-bvlfn ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6bbff654db-bvlfn condition met waiting for pod/pxc-client-6bbff654db-bvlfn to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.KUxcwQMinG/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.KUxcwQMinG/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql /tmp/tmp.KUxcwQMinG/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking xtrabackup' Checking xtrabackup ++ getSecretData my-cluster-secrets xtrabackup ++ local secretName=my-cluster-secrets ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.qYP4ZDUuxL +++ mktemp ++ local LAST_ERR=/tmp/tmp.1zEcrcJMsV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qYP4ZDUuxL ++ cat /tmp/tmp.1zEcrcJMsV ++ rm /tmp/tmp.qYP4ZDUuxL /tmp/tmp.1zEcrcJMsV ++ return 0 + secret_pass='#sS%T0p#yiuLeHQ{?iR' ++ getSecretData internal-some-name xtrabackup ++ local secretName=internal-some-name ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.NdICxOj6ts +++ mktemp ++ local LAST_ERR=/tmp/tmp.Wvjyr2qxDm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NdICxOj6ts ++ cat /tmp/tmp.Wvjyr2qxDm ++ rm /tmp/tmp.NdICxOj6ts /tmp/tmp.Wvjyr2qxDm ++ return 0 + int_secret_pass='#sS%T0p#yiuLeHQ{?iR' + [[ -z #sS%T0p#yiuLeHQ{?iR ]] + [[ #sS%T0p#yiuLeHQ{?iR != \#\s\S\%\T\0\p\#\y\i\u\L\e\H\Q\{\?\i\R ]] + [[ xtrabackup != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ xtrabackup ]] + [[ '' =~ xtrabackup ]] + echo 'Running compare for xtrabackup' Running compare for xtrabackup + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''#sS%T0p#yiuLeHQ{?iR'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''#sS%T0p#yiuLeHQ{?iR'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.4 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''#sS%T0p#yiuLeHQ{?iR'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''#sS%T0p#yiuLeHQ{?iR'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uXvZa5CiVL +++ mktemp ++ local 
LAST_ERR=/tmp/tmp.V0hMFltHd5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uXvZa5CiVL ++ cat /tmp/tmp.V0hMFltHd5 ++ rm /tmp/tmp.uXvZa5CiVL /tmp/tmp.V0hMFltHd5 ++ return 0 + client_pod=pxc-client-6bbff654db-bvlfn + wait_pod pxc-client-6bbff654db-bvlfn + local pod=pxc-client-6bbff654db-bvlfn + local max_retry=480 + local ns= ++ echo pxc-client-6bbff654db-bvlfn ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6bbff654db-bvlfn condition met waiting for pod/pxc-client-6bbff654db-bvlfn to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.KUxcwQMinG/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.KUxcwQMinG/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql /tmp/tmp.KUxcwQMinG/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking monitor' Checking monitor ++ getSecretData my-cluster-secrets monitor ++ local secretName=my-cluster-secrets ++ local dataKey=monitor ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.monitor}}' +++ mktemp ++ base64 --decode ++ local LAST_OUT=/tmp/tmp.N61SKbbCGG +++ mktemp ++ local LAST_ERR=/tmp/tmp.Vgvyx3AUq2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.N61SKbbCGG ++ cat /tmp/tmp.Vgvyx3AUq2 ++ rm /tmp/tmp.N61SKbbCGG /tmp/tmp.Vgvyx3AUq2 ++ return 0 + secret_pass=monitor_password ++ getSecretData internal-some-name monitor ++ local secretName=internal-some-name ++ local dataKey=monitor ++ kubectl_bin get secrets/internal-some-name '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.cAYqzNxLRh +++ mktemp ++ local LAST_ERR=/tmp/tmp.q9FsFSubDr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cAYqzNxLRh ++ cat /tmp/tmp.q9FsFSubDr ++ rm /tmp/tmp.cAYqzNxLRh /tmp/tmp.q9FsFSubDr ++ return 0 + int_secret_pass=monitor_password + [[ -z monitor_password ]] + [[ monitor_password != \m\o\n\i\t\o\r\_\p\a\s\s\w\o\r\d ]] + [[ monitor != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ monitor ]] + [[ '' =~ monitor ]] + echo 'Running compare for monitor' Running compare for monitor + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.4 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local 
'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.B37iEsELuD +++ mktemp ++ local LAST_ERR=/tmp/tmp.lGeK78QNnH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.B37iEsELuD ++ cat /tmp/tmp.lGeK78QNnH ++ rm /tmp/tmp.B37iEsELuD /tmp/tmp.lGeK78QNnH ++ return 0 + client_pod=pxc-client-6bbff654db-bvlfn + wait_pod pxc-client-6bbff654db-bvlfn + local pod=pxc-client-6bbff654db-bvlfn + local max_retry=480 + local ns= ++ echo pxc-client-6bbff654db-bvlfn ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6bbff654db-bvlfn condition met waiting for pod/pxc-client-6bbff654db-bvlfn to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.KUxcwQMinG/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.KUxcwQMinG/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql /tmp/tmp.KUxcwQMinG/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking proxyadmin' Checking proxyadmin ++ getSecretData my-cluster-secrets proxyadmin ++ local secretName=my-cluster-secrets ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.HQRaIsG8YI +++ mktemp ++ local LAST_ERR=/tmp/tmp.dWMm6IG5F1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HQRaIsG8YI ++ cat /tmp/tmp.dWMm6IG5F1 ++ rm /tmp/tmp.HQRaIsG8YI /tmp/tmp.dWMm6IG5F1 ++ return 0 + secret_pass='-}D&b#eQ<$.OpYlh' ++ getSecretData internal-some-name proxyadmin ++ local secretName=internal-some-name ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/internal-some-name '--template={{.data.proxyadmin}}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QlBwkpE9B3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.6pjFnUZ5vF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ base64 --decode ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QlBwkpE9B3 ++ cat /tmp/tmp.6pjFnUZ5vF ++ rm /tmp/tmp.QlBwkpE9B3 /tmp/tmp.6pjFnUZ5vF ++ return 0 + int_secret_pass='-}D&b#eQ<$.OpYlh' + [[ -z -}D&b#eQ<$.OpYlh ]] + [[ -}D&b#eQ<$.OpYlh != \-\}\D\&\b\#\e\Q\<\$\.\O\p\Y\l\h ]] + [[ proxyadmin != \p\r\o\x\y\a\d\m\i\n ]] + [[ proxyadmin == \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ proxyadmin ]] + [[ '' =~ proxyadmin ]] + echo 'Running compare for proxyadmin' Running compare for proxyadmin + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''-}D&b#eQ<$.OpYlh'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''-}D&b#eQ<$.OpYlh'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.4 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-2-84.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''-}D&b#eQ<$.OpYlh'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''-}D&b#eQ<$.OpYlh'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.KUxcwQMinG/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-2.sql /tmp/tmp.KUxcwQMinG/select-2.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking operator' Checking operator ++ getSecretData my-cluster-secrets operator ++ local secretName=my-cluster-secrets ++ local dataKey=operator ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.adSBjCxLBE +++ mktemp ++ local LAST_ERR=/tmp/tmp.WqwiPTRVGK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.adSBjCxLBE ++ cat /tmp/tmp.WqwiPTRVGK ++ rm /tmp/tmp.adSBjCxLBE /tmp/tmp.WqwiPTRVGK ++ return 0 + secret_pass='tRY^cKdxo8{OB=m=X ]] + [[ _Bb9qu,K]JL{IJ]>X != \_\B\b\9\q\u\,\K\]\J\L\{\I\J\]\>\X ]] + [[ replication != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ replication ]] + [[ '' =~ replication ]] + echo 'Running compare for replication' Running compare for replication + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''_Bb9qu,K]JL{IJ]>X'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''_Bb9qu,K]JL{IJ]>X'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.4 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''_Bb9qu,K]JL{IJ]>X'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''_Bb9qu,K]JL{IJ]>X'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.H3gVGdCyBp +++ mktemp ++ local LAST_ERR=/tmp/tmp.k5WocpRzWL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.H3gVGdCyBp ++ cat /tmp/tmp.k5WocpRzWL ++ rm /tmp/tmp.H3gVGdCyBp /tmp/tmp.k5WocpRzWL ++ return 0 + client_pod=pxc-client-6bbff654db-bvlfn + wait_pod pxc-client-6bbff654db-bvlfn + local pod=pxc-client-6bbff654db-bvlfn + local max_retry=480 + local ns= ++ echo pxc-client-6bbff654db-bvlfn ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E 
'^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6bbff654db-bvlfn condition met waiting for pod/pxc-client-6bbff654db-bvlfn to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.KUxcwQMinG/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.KUxcwQMinG/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql /tmp/tmp.KUxcwQMinG/select-4.sql + [[ -n '' ]] + [[ -n '' ]] + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.BKWYTbvcKk ++ mktemp + local LAST_ERR=/tmp/tmp.NbzFpQWJai + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BKWYTbvcKk secret/my-cluster-secrets patched + cat /tmp/tmp.NbzFpQWJai + rm /tmp/tmp.BKWYTbvcKk /tmp/tmp.NbzFpQWJai + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.82sBtiuqTN +++ mktemp ++ local LAST_ERR=/tmp/tmp.REQojUtkBz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.82sBtiuqTN ++ cat /tmp/tmp.REQojUtkBz ++ rm /tmp/tmp.82sBtiuqTN /tmp/tmp.REQojUtkBz ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MpIXepZP1n +++ mktemp ++ local LAST_ERR=/tmp/tmp.5LL9MyRECH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MpIXepZP1n ++ cat /tmp/tmp.5LL9MyRECH ++ rm /tmp/tmp.MpIXepZP1n /tmp/tmp.5LL9MyRECH ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.WQgEoMbyeG ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.KFkVnHUzfT +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.WQgEoMbyeG +++++ cat /tmp/tmp.KFkVnHUzfT +++++ rm /tmp/tmp.WQgEoMbyeG 
/tmp/tmp.KFkVnHUzfT +++++ return 0 ++++ [[ false == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.TBL7bYqQgI ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.YWEteAsA8v +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.TBL7bYqQgI +++++ cat /tmp/tmp.YWEteAsA8v +++++ rm /tmp/tmp.TBL7bYqQgI /tmp/tmp.YWEteAsA8v +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aTxQPoT3mG +++ mktemp ++ local LAST_ERR=/tmp/tmp.yDaRUL38kf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aTxQPoT3mG ++ cat /tmp/tmp.yDaRUL38kf ++ rm /tmp/tmp.aTxQPoT3mG /tmp/tmp.yDaRUL38kf ++ return 0 + [[ 2 == \2 ]] + echo + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.4 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SXnehu7W0g +++ mktemp ++ local LAST_ERR=/tmp/tmp.t0JHxFYVOD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SXnehu7W0g ++ cat /tmp/tmp.t0JHxFYVOD ++ rm /tmp/tmp.SXnehu7W0g /tmp/tmp.t0JHxFYVOD ++ return 0 + client_pod=pxc-client-6bbff654db-bvlfn + wait_pod pxc-client-6bbff654db-bvlfn + local pod=pxc-client-6bbff654db-bvlfn + local max_retry=480 + local ns= ++ echo pxc-client-6bbff654db-bvlfn ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6bbff654db-bvlfn condition met waiting for pod/pxc-client-6bbff654db-bvlfn to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.KUxcwQMinG/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.KUxcwQMinG/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql /tmp/tmp.KUxcwQMinG/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.N4RItjyxyN ++ mktemp + local LAST_ERR=/tmp/tmp.7NmYZZlaxd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.N4RItjyxyN perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.7NmYZZlaxd + rm /tmp/tmp.N4RItjyxyN /tmp/tmp.7NmYZZlaxd + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.I5i7ijvh2T +++ mktemp ++ local LAST_ERR=/tmp/tmp.OWSMlcF2Zs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.I5i7ijvh2T ++ cat /tmp/tmp.OWSMlcF2Zs ++ rm /tmp/tmp.I5i7ijvh2T /tmp/tmp.OWSMlcF2Zs ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nAyzyKomnp +++ mktemp ++ local LAST_ERR=/tmp/tmp.zUHF27NqNW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nAyzyKomnp ++ cat /tmp/tmp.zUHF27NqNW ++ rm /tmp/tmp.nAyzyKomnp /tmp/tmp.zUHF27NqNW ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.xWcNk85Kzi ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.PAE9hpNRqO +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.xWcNk85Kzi +++++ cat /tmp/tmp.PAE9hpNRqO +++++ rm /tmp/tmp.xWcNk85Kzi /tmp/tmp.PAE9hpNRqO +++++ return 0 ++++ [[ false == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.IbSEDYKWcD ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.tuo9bu20Cj +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ 
exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.IbSEDYKWcD +++++ cat /tmp/tmp.tuo9bu20Cj +++++ rm /tmp/tmp.IbSEDYKWcD /tmp/tmp.tuo9bu20Cj +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.u5Q6BVaP5o +++ mktemp ++ local LAST_ERR=/tmp/tmp.EHUT4CqaPX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.u5Q6BVaP5o ++ cat /tmp/tmp.EHUT4CqaPX ++ rm /tmp/tmp.u5Q6BVaP5o /tmp/tmp.EHUT4CqaPX ++ return 0 + [[ 3 == \3 ]] + echo + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.1Y54PSp2hM ++ mktemp + local LAST_ERR=/tmp/tmp.1nnrHTX6m3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1Y54PSp2hM secret/my-cluster-secrets patched + cat /tmp/tmp.1nnrHTX6m3 + rm /tmp/tmp.1Y54PSp2hM /tmp/tmp.1nnrHTX6m3 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Kj7xxZkNm6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.84j2srSYWE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Kj7xxZkNm6 ++ cat /tmp/tmp.84j2srSYWE ++ rm /tmp/tmp.Kj7xxZkNm6 /tmp/tmp.84j2srSYWE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.v1hCrHjpIE +++ mktemp ++ local LAST_ERR=/tmp/tmp.p5NX6Sv5fH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.v1hCrHjpIE ++ cat /tmp/tmp.p5NX6Sv5fH ++ rm /tmp/tmp.v1hCrHjpIE /tmp/tmp.p5NX6Sv5fH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
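The proxyadmin rotation traced above boils down to patching a single key of the cluster secret with a base64-encoded value (dGVzdC1wYXNzd29yZA== is simply "test-password"). A minimal sketch of that patch_secret step, using only the names and command seen in this log:

    new_pass=$(echo -n 'test-password' | base64)   # -> dGVzdC1wYXNzd29yZA==
    kubectl patch secret my-cluster-secrets \
      -p="{\"data\":{\"proxyadmin\": \"${new_pass}\"}}"

The operator picks up the changed secret and reconciles the cluster, which is why the run immediately goes back into the wait-for-consistency polling that continues below.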
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MyOngNL4pM +++ mktemp ++ local LAST_ERR=/tmp/tmp.0rAXlRbP0Y ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MyOngNL4pM ++ cat /tmp/tmp.0rAXlRbP0Y ++ rm /tmp/tmp.MyOngNL4pM /tmp/tmp.0rAXlRbP0Y ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FRpRwQnkMD +++ mktemp ++ local LAST_ERR=/tmp/tmp.xl8ab9deOu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FRpRwQnkMD ++ cat /tmp/tmp.xl8ab9deOu ++ rm /tmp/tmp.FRpRwQnkMD /tmp/tmp.xl8ab9deOu ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.m3S4kz4NrQ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.4uBBXO5DrT +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.m3S4kz4NrQ +++++ cat /tmp/tmp.4uBBXO5DrT +++++ rm /tmp/tmp.m3S4kz4NrQ /tmp/tmp.4uBBXO5DrT +++++ return 0 ++++ [[ false == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.qDZ754RCzO ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.EVuf0Hvyve +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.qDZ754RCzO +++++ cat /tmp/tmp.EVuf0Hvyve +++++ rm /tmp/tmp.qDZ754RCzO /tmp/tmp.EVuf0Hvyve +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AxVRS16mgN +++ mktemp ++ local LAST_ERR=/tmp/tmp.Bo88fBMLNZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AxVRS16mgN ++ cat /tmp/tmp.Bo88fBMLNZ ++ rm /tmp/tmp.AxVRS16mgN /tmp/tmp.Bo88fBMLNZ ++ return 0 + [[ 3 == \3 ]] + echo + sleep 15 + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.4 ]] + '[' -f 
/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-2-84.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.KUxcwQMinG/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-2.sql /tmp/tmp.KUxcwQMinG/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.4 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-2-84.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.KUxcwQMinG/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-2.sql /tmp/tmp.KUxcwQMinG/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.4 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-2-84.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.KUxcwQMinG/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-2.sql /tmp/tmp.KUxcwQMinG/select-2.sql + compare_mysql_cmd_local proxysql-cfg 'SELECT variable_value from global_variables WHERE variable_name='\''admin-cluster_password'\'';' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=proxysql-cfg + local 'command=SELECT variable_value from global_variables WHERE variable_name='\''admin-cluster_password'\'';' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/proxysql-cfg.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.4 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/proxysql-cfg-84.sql ']' + run_mysql_local 'SELECT variable_value from global_variables WHERE variable_name='\''admin-cluster_password'\'';' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SELECT variable_value from global_variables WHERE variable_name='\''admin-cluster_password'\'';' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.KUxcwQMinG/proxysql-cfg.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/proxysql-cfg.sql /tmp/tmp.KUxcwQMinG/proxysql-cfg.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.DzUo7LQnqC ++ mktemp + local LAST_ERR=/tmp/tmp.q1X2qsMs5l + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.DzUo7LQnqC perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.q1X2qsMs5l + rm /tmp/tmp.DzUo7LQnqC /tmp/tmp.q1X2qsMs5l + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.MGXKaUY6Bx ++ mktemp + local LAST_ERR=/tmp/tmp.xDLFlg14dY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.MGXKaUY6Bx secret/my-cluster-secrets patched + cat /tmp/tmp.xDLFlg14dY + rm /tmp/tmp.MGXKaUY6Bx /tmp/tmp.xDLFlg14dY + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster 
consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Iyy5JYbbjK +++ mktemp ++ local LAST_ERR=/tmp/tmp.MRa6OiroxP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Iyy5JYbbjK ++ cat /tmp/tmp.MRa6OiroxP ++ rm /tmp/tmp.Iyy5JYbbjK /tmp/tmp.MRa6OiroxP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sVTOSwp779 +++ mktemp ++ local LAST_ERR=/tmp/tmp.WBvDOVGhHi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sVTOSwp779 ++ cat /tmp/tmp.WBvDOVGhHi ++ rm /tmp/tmp.sVTOSwp779 /tmp/tmp.WBvDOVGhHi ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DBoMvhamdE +++ mktemp ++ local LAST_ERR=/tmp/tmp.kuJ8ZaAwfu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DBoMvhamdE ++ cat /tmp/tmp.kuJ8ZaAwfu ++ rm /tmp/tmp.DBoMvhamdE /tmp/tmp.kuJ8ZaAwfu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mnXVCRcfYd +++ mktemp ++ local LAST_ERR=/tmp/tmp.V3e0OW6Gb9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mnXVCRcfYd ++ cat /tmp/tmp.V3e0OW6Gb9 ++ rm /tmp/tmp.mnXVCRcfYd /tmp/tmp.V3e0OW6Gb9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YhQimS4wbr +++ mktemp ++ local LAST_ERR=/tmp/tmp.DMHM08uyct ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YhQimS4wbr ++ cat /tmp/tmp.DMHM08uyct ++ rm /tmp/tmp.YhQimS4wbr /tmp/tmp.DMHM08uyct ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ruhqMjNk2I +++ mktemp ++ local LAST_ERR=/tmp/tmp.B29OMSlwM6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ruhqMjNk2I ++ cat /tmp/tmp.B29OMSlwM6 ++ rm /tmp/tmp.ruhqMjNk2I /tmp/tmp.B29OMSlwM6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
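This waiting loop, like the identical ones elsewhere in the run, is just a poll of the custom resource status. A condensed sketch of what wait_cluster_consistency is doing here, with the jsonpath fields taken from the traces (the log's version also caps retries at 300 with a 5-second sleep; that bookkeeping and the temp-file plumbing are omitted):

    until [[ "$(kubectl get pxc some-name -o 'jsonpath={.status.state}')" == "ready" ]]; do
      sleep 5
    done
    kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}'       # expected: 3
    kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}'  # expected: 2

The cluster reports "initializing" while the xtrabackup password change is rolled through the pods, so this particular wait needs more iterations than the earlier ones.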
.+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1mLhGUWW0v +++ mktemp ++ local LAST_ERR=/tmp/tmp.3AbweHoVi6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1mLhGUWW0v ++ cat /tmp/tmp.3AbweHoVi6 ++ rm /tmp/tmp.1mLhGUWW0v /tmp/tmp.3AbweHoVi6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.W8PWpcMgxl +++ mktemp ++ local LAST_ERR=/tmp/tmp.KESp3H8T4e ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.W8PWpcMgxl ++ cat /tmp/tmp.KESp3H8T4e ++ rm /tmp/tmp.W8PWpcMgxl /tmp/tmp.KESp3H8T4e ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Efqqpy7kxa +++ mktemp ++ local LAST_ERR=/tmp/tmp.avNUe8MBAC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Efqqpy7kxa ++ cat /tmp/tmp.avNUe8MBAC ++ rm /tmp/tmp.Efqqpy7kxa /tmp/tmp.avNUe8MBAC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WYCvxcxz3R +++ mktemp ++ local LAST_ERR=/tmp/tmp.5DWB3j7Qfa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WYCvxcxz3R ++ cat /tmp/tmp.5DWB3j7Qfa ++ rm /tmp/tmp.WYCvxcxz3R /tmp/tmp.5DWB3j7Qfa ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2d2o6h9tjN +++ mktemp ++ local LAST_ERR=/tmp/tmp.lsYIX4WULv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2d2o6h9tjN ++ cat /tmp/tmp.lsYIX4WULv ++ rm /tmp/tmp.2d2o6h9tjN /tmp/tmp.lsYIX4WULv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eZJ4wRvjXq +++ mktemp ++ local LAST_ERR=/tmp/tmp.flcydmEHTU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eZJ4wRvjXq ++ cat /tmp/tmp.flcydmEHTU ++ rm /tmp/tmp.eZJ4wRvjXq /tmp/tmp.flcydmEHTU ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iDsYQuZ4wu +++ mktemp ++ local LAST_ERR=/tmp/tmp.Lqa7HXFVs0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iDsYQuZ4wu ++ cat /tmp/tmp.Lqa7HXFVs0 ++ rm /tmp/tmp.iDsYQuZ4wu /tmp/tmp.Lqa7HXFVs0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.G9b0L9ehv9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.zMz5hk0xUp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.G9b0L9ehv9 ++ cat /tmp/tmp.zMz5hk0xUp ++ rm /tmp/tmp.G9b0L9ehv9 /tmp/tmp.zMz5hk0xUp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.s8RGPOUATR +++ mktemp ++ local LAST_ERR=/tmp/tmp.XrlLlleT4c ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.s8RGPOUATR ++ cat /tmp/tmp.XrlLlleT4c ++ rm /tmp/tmp.s8RGPOUATR /tmp/tmp.XrlLlleT4c ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UV9jYMVGcf +++ mktemp ++ local LAST_ERR=/tmp/tmp.mQ4Dml37bo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UV9jYMVGcf ++ cat /tmp/tmp.mQ4Dml37bo ++ rm /tmp/tmp.UV9jYMVGcf /tmp/tmp.mQ4Dml37bo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RVFH0zDpVr +++ mktemp ++ local LAST_ERR=/tmp/tmp.dVZmDtKACs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RVFH0zDpVr ++ cat /tmp/tmp.dVZmDtKACs ++ rm /tmp/tmp.RVFH0zDpVr /tmp/tmp.dVZmDtKACs ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RGboOeN51Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.j0VPKsKcMG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RGboOeN51Y ++ cat /tmp/tmp.j0VPKsKcMG ++ rm /tmp/tmp.RGboOeN51Y /tmp/tmp.j0VPKsKcMG ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OsfzZk0O7K +++ mktemp ++ local LAST_ERR=/tmp/tmp.DLRjfqE2ho ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OsfzZk0O7K ++ cat /tmp/tmp.DLRjfqE2ho ++ rm /tmp/tmp.OsfzZk0O7K /tmp/tmp.DLRjfqE2ho ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZqSQV7xG0S +++ mktemp ++ local LAST_ERR=/tmp/tmp.gsHlZCOCkY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZqSQV7xG0S ++ cat /tmp/tmp.gsHlZCOCkY ++ rm /tmp/tmp.ZqSQV7xG0S /tmp/tmp.gsHlZCOCkY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.p86nOd3tQp +++ mktemp ++ local LAST_ERR=/tmp/tmp.AAqkKCTSFc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.p86nOd3tQp ++ cat /tmp/tmp.AAqkKCTSFc ++ rm /tmp/tmp.p86nOd3tQp /tmp/tmp.AAqkKCTSFc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FwYKrq4KDu +++ mktemp ++ local LAST_ERR=/tmp/tmp.o4hkVjM2zv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FwYKrq4KDu ++ cat /tmp/tmp.o4hkVjM2zv ++ rm /tmp/tmp.FwYKrq4KDu /tmp/tmp.o4hkVjM2zv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
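Nearly every kubectl call in this log is wrapped the same way: stdout and stderr are captured into mktemp files, the call is retried up to three times, and the captured output is echoed back before the temp files are removed. A hedged reconstruction of that kubectl_bin wrapper, inferred only from these traces (the real helper may differ in details such as the back-off and stderr handling):

    kubectl_bin() {
      local LAST_OUT LAST_ERR exit_status
      LAST_OUT=$(mktemp)
      LAST_ERR=$(mktemp)
      for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ "$exit_status" != 0 ]; then
          sleep 1          # assumption: some pause between retries
        else
          break
        fi
      done
      cat "$LAST_OUT"
      cat "$LAST_ERR" >&2
      rm "$LAST_OUT" "$LAST_ERR"
      return $exit_status
    }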
.+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1caM39o8Oq +++ mktemp ++ local LAST_ERR=/tmp/tmp.tjVus1fa94 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1caM39o8Oq ++ cat /tmp/tmp.tjVus1fa94 ++ rm /tmp/tmp.1caM39o8Oq /tmp/tmp.tjVus1fa94 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SXqsEv7V0S +++ mktemp ++ local LAST_ERR=/tmp/tmp.3mVuPDhCfj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SXqsEv7V0S ++ cat /tmp/tmp.3mVuPDhCfj ++ rm /tmp/tmp.SXqsEv7V0S /tmp/tmp.3mVuPDhCfj ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.SW9Z3MSDVF ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.pa2NveSj5h +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.SW9Z3MSDVF +++++ cat /tmp/tmp.pa2NveSj5h +++++ rm /tmp/tmp.SW9Z3MSDVF /tmp/tmp.pa2NveSj5h +++++ return 0 ++++ [[ false == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ClHOFjGwrR ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.jNgxHhDTiB +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ClHOFjGwrR +++++ cat /tmp/tmp.jNgxHhDTiB +++++ rm /tmp/tmp.ClHOFjGwrR /tmp/tmp.jNgxHhDTiB +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vguzIJTomk +++ mktemp ++ local LAST_ERR=/tmp/tmp.lwTWWCmHVz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vguzIJTomk ++ cat /tmp/tmp.lwTWWCmHVz ++ rm /tmp/tmp.vguzIJTomk /tmp/tmp.lwTWWCmHVz ++ return 0 + [[ 2 == \2 ]] + echo + sleep 15 + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.4 ]] + '[' -f 
/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-3-84.sql ']' + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' -s /tmp/tmp.KUxcwQMinG/select-3.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-3.sql /tmp/tmp.KUxcwQMinG/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.GevFfLv6Ut ++ mktemp + local LAST_ERR=/tmp/tmp.32vZUddMlK + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.GevFfLv6Ut secret/my-cluster-secrets patched + cat /tmp/tmp.32vZUddMlK + rm /tmp/tmp.GevFfLv6Ut /tmp/tmp.32vZUddMlK + return 0 + wait_for_password_propagation my-cluster-secrets monitor some-name + local secret=my-cluster-secrets + local user=monitor + local cluster=some-name + local max_retry=600 + local root_pass + local retry ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.mBIWpaLugG +++ mktemp ++ local LAST_ERR=/tmp/tmp.qYdq9HvRbN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mBIWpaLugG ++ cat /tmp/tmp.qYdq9HvRbN ++ rm /tmp/tmp.mBIWpaLugG /tmp/tmp.qYdq9HvRbN ++ return 0 + root_pass=test-password + [[ -z test-password ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 5\.7 ]] + retry=0 + is_password_updated monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep additional_password + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oU49cgVIR1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.zRP5rCDLoN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oU49cgVIR1 ++ cat /tmp/tmp.zRP5rCDLoN ++ rm /tmp/tmp.oU49cgVIR1 /tmp/tmp.zRP5rCDLoN ++ return 0 + client_pod=pxc-client-6bbff654db-bvlfn + wait_pod pxc-client-6bbff654db-bvlfn + local 
pod=pxc-client-6bbff654db-bvlfn + local max_retry=480 + local ns= ++ echo pxc-client-6bbff654db-bvlfn ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6bbff654db-bvlfn condition met waiting for pod/pxc-client-6bbff654db-bvlfn to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace {"additional_password": "$A$005$\\u0005tv\\\\8EKsk\\bCX%\\u000e\\u0011\\n\\u0017=\\u0010\\u0016Cb/1ncuPnQG/o2lSj9m.N6CZOqMZx2T3qZrvnv6Vpi5"} + sleep 5 + retry=0 + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + local result ++ run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + result='{"additional_password": "$A$005$\\u0005tv\\\\8EKsk\\bCX%\\u000e\\u0011\\n\\u0017=\\u0010\\u0016Cb/1ncuPnQG/o2lSj9m.N6CZOqMZx2T3qZrvnv6Vpi5"}' + [[ {"additional_password": "$A$005$\\u0005tv\\\\8EKsk\\bCX%\\u000e\\u0011\\n\\u0017=\\u0010\\u0016Cb/1ncuPnQG/o2lSj9m.N6CZOqMZx2T3qZrvnv6Vpi5"} =~ additional_password ]] + return 1 + sleep 1 + retry=1 + [[ 1 -eq 0 ]] + [[ 1 -ge 600 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + local result ++ run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + result='{"additional_password": "$A$005$\\u0005tv\\\\8EKsk\\bCX%\\u000e\\u0011\\n\\u0017=\\u0010\\u0016Cb/1ncuPnQG/o2lSj9m.N6CZOqMZx2T3qZrvnv6Vpi5"}' + [[ {"additional_password": "$A$005$\\u0005tv\\\\8EKsk\\bCX%\\u000e\\u0011\\n\\u0017=\\u0010\\u0016Cb/1ncuPnQG/o2lSj9m.N6CZOqMZx2T3qZrvnv6Vpi5"} =~ additional_password ]] + return 1 + sleep 1 + retry=2 + [[ 2 -eq 0 ]] + [[ 2 -ge 600 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + local result ++ run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + result='{"additional_password": "$A$005$\\u0005tv\\\\8EKsk\\bCX%\\u000e\\u0011\\n\\u0017=\\u0010\\u0016Cb/1ncuPnQG/o2lSj9m.N6CZOqMZx2T3qZrvnv6Vpi5"}' + [[ {"additional_password": "$A$005$\\u0005tv\\\\8EKsk\\bCX%\\u000e\\u0011\\n\\u0017=\\u0010\\u0016Cb/1ncuPnQG/o2lSj9m.N6CZOqMZx2T3qZrvnv6Vpi5"} =~ additional_password ]] + return 1 + sleep 1 + retry=3 + [[ 3 -eq 0 ]] + [[ 3 -ge 600 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + local result ++ run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + result='{"additional_password": "$A$005$\\u0005tv\\\\8EKsk\\bCX%\\u000e\\u0011\\n\\u0017=\\u0010\\u0016Cb/1ncuPnQG/o2lSj9m.N6CZOqMZx2T3qZrvnv6Vpi5"}' + [[ {"additional_password": "$A$005$\\u0005tv\\\\8EKsk\\bCX%\\u000e\\u0011\\n\\u0017=\\u0010\\u0016Cb/1ncuPnQG/o2lSj9m.N6CZOqMZx2T3qZrvnv6Vpi5"} =~ additional_password ]] + return 1 + sleep 1 + retry=4 + [[ 4 -eq 0 ]] + [[ 4 -ge 600 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot 
-p'\''test-password'\''' + local result ++ run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + result='{"additional_password": "$A$005$\\u0005tv\\\\8EKsk\\bCX%\\u000e\\u0011\\n\\u0017=\\u0010\\u0016Cb/1ncuPnQG/o2lSj9m.N6CZOqMZx2T3qZrvnv6Vpi5"}' + [[ {"additional_password": "$A$005$\\u0005tv\\\\8EKsk\\bCX%\\u000e\\u0011\\n\\u0017=\\u0010\\u0016Cb/1ncuPnQG/o2lSj9m.N6CZOqMZx2T3qZrvnv6Vpi5"} =~ additional_password ]] + return 1 + sleep 1 + retry=5 + [[ 5 -eq 0 ]] + [[ 5 -ge 600 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + local result ++ run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + result='{"additional_password": "$A$005$\\u0005tv\\\\8EKsk\\bCX%\\u000e\\u0011\\n\\u0017=\\u0010\\u0016Cb/1ncuPnQG/o2lSj9m.N6CZOqMZx2T3qZrvnv6Vpi5"}' + [[ {"additional_password": "$A$005$\\u0005tv\\\\8EKsk\\bCX%\\u000e\\u0011\\n\\u0017=\\u0010\\u0016Cb/1ncuPnQG/o2lSj9m.N6CZOqMZx2T3qZrvnv6Vpi5"} =~ additional_password ]] + return 1 + sleep 1 + retry=6 + [[ 6 -eq 0 ]] + [[ 6 -ge 600 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + local result ++ run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + result='{"additional_password": "$A$005$\\u0005tv\\\\8EKsk\\bCX%\\u000e\\u0011\\n\\u0017=\\u0010\\u0016Cb/1ncuPnQG/o2lSj9m.N6CZOqMZx2T3qZrvnv6Vpi5"}' + [[ {"additional_password": "$A$005$\\u0005tv\\\\8EKsk\\bCX%\\u000e\\u0011\\n\\u0017=\\u0010\\u0016Cb/1ncuPnQG/o2lSj9m.N6CZOqMZx2T3qZrvnv6Vpi5"} =~ additional_password ]] + return 1 + sleep 1 + retry=7 + [[ 7 -eq 0 ]] + [[ 7 -ge 600 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + local result ++ run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + result=NULL + [[ NULL =~ additional_password ]] + [[ -z NULL ]] + [[ NULL =~ ^[[:space:]]*$ ]] + [[ NULL =~ NULL ]] + return 0 + echo 'Old password successfully discarded after 7 retries' Old password successfully discarded after 7 retries + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6dmOjKUoor +++ mktemp ++ local LAST_ERR=/tmp/tmp.IsVat7TNzL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6dmOjKUoor ++ cat /tmp/tmp.IsVat7TNzL ++ rm /tmp/tmp.6dmOjKUoor /tmp/tmp.IsVat7TNzL ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name 
-o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.W61O6AmBo1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.SWH07XVMA1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.W61O6AmBo1 ++ cat /tmp/tmp.SWH07XVMA1 ++ rm /tmp/tmp.W61O6AmBo1 /tmp/tmp.SWH07XVMA1 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.e59jVIrVad ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.mk3S3JZUE6 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.e59jVIrVad +++++ cat /tmp/tmp.mk3S3JZUE6 +++++ rm /tmp/tmp.e59jVIrVad /tmp/tmp.mk3S3JZUE6 +++++ return 0 ++++ [[ false == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.x5k4tzkpOu ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.3PAAEVa7U9 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.x5k4tzkpOu +++++ cat /tmp/tmp.3PAAEVa7U9 +++++ rm /tmp/tmp.x5k4tzkpOu /tmp/tmp.3PAAEVa7U9 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YgIKPXhoaa +++ mktemp ++ local LAST_ERR=/tmp/tmp.2oA6wB3mD3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YgIKPXhoaa ++ cat /tmp/tmp.2oA6wB3mD3 ++ rm /tmp/tmp.YgIKPXhoaa /tmp/tmp.2oA6wB3mD3 ++ return 0 + [[ 2 == \2 ]] + echo + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.4 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.q0KM9Llv6G +++ mktemp ++ local LAST_ERR=/tmp/tmp.Sk1phYhH1H ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 
'!=' 0 ']' ++ break ++ cat /tmp/tmp.q0KM9Llv6G ++ cat /tmp/tmp.Sk1phYhH1H ++ rm /tmp/tmp.q0KM9Llv6G /tmp/tmp.Sk1phYhH1H ++ return 0 + client_pod=pxc-client-6bbff654db-bvlfn + wait_pod pxc-client-6bbff654db-bvlfn + local pod=pxc-client-6bbff654db-bvlfn + local max_retry=480 + local ns= ++ echo pxc-client-6bbff654db-bvlfn ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6bbff654db-bvlfn condition met waiting for pod/pxc-client-6bbff654db-bvlfn to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.KUxcwQMinG/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.KUxcwQMinG/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql /tmp/tmp.KUxcwQMinG/select-4.sql + compare_mysql_cmd_local proxysql-cfg 'SELECT variable_value from global_variables WHERE variable_name='\''mysql-monitor_password'\'';' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=proxysql-cfg + local 'command=SELECT variable_value from global_variables WHERE variable_name='\''mysql-monitor_password'\'';' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/proxysql-cfg.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.4 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/proxysql-cfg-84.sql ']' + run_mysql_local 'SELECT variable_value from global_variables WHERE variable_name='\''mysql-monitor_password'\'';' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SELECT variable_value from global_variables WHERE variable_name='\''mysql-monitor_password'\'';' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.KUxcwQMinG/proxysql-cfg.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/proxysql-cfg.sql /tmp/tmp.KUxcwQMinG/proxysql-cfg.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.hInPveFwBo ++ mktemp + local LAST_ERR=/tmp/tmp.e3dOS6t3sP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hInPveFwBo secret/my-cluster-secrets patched + cat /tmp/tmp.e3dOS6t3sP + rm /tmp/tmp.hInPveFwBo /tmp/tmp.e3dOS6t3sP + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iBxCDO5crE +++ mktemp ++ local LAST_ERR=/tmp/tmp.gbXK6EQXwk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iBxCDO5crE ++ cat /tmp/tmp.gbXK6EQXwk ++ rm /tmp/tmp.iBxCDO5crE /tmp/tmp.gbXK6EQXwk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fiNfnueZ0g +++ mktemp ++ local LAST_ERR=/tmp/tmp.Alb648GbEa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fiNfnueZ0g ++ cat /tmp/tmp.Alb648GbEa ++ rm /tmp/tmp.fiNfnueZ0g /tmp/tmp.Alb648GbEa ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
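The 'test monitor' step above relied on MySQL 8 dual-password support: right after the secret is patched, the old monitor password is still kept as additional_password in mysql.user, and the test polls until the operator discards it. The check it retried is essentially the following (query and connection string copied from the log; run_mysql is the log's own helper, which executes the statement from the pxc-client pod):

    run_mysql "SELECT User_attributes FROM mysql.user WHERE user='monitor'" \
      "-h some-name-pxc -uroot -p'test-password'" | grep additional_password

While both passwords are valid the column holds {"additional_password": "..."}; once the old one is dropped the query returns NULL and the loop exits with "Old password successfully discarded after 7 retries".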
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Dg33SUUesD +++ mktemp ++ local LAST_ERR=/tmp/tmp.3FEyDvGcYa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Dg33SUUesD ++ cat /tmp/tmp.3FEyDvGcYa ++ rm /tmp/tmp.Dg33SUUesD /tmp/tmp.3FEyDvGcYa ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jtzozCh8f2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.9yWRowiPD7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jtzozCh8f2 ++ cat /tmp/tmp.9yWRowiPD7 ++ rm /tmp/tmp.jtzozCh8f2 /tmp/tmp.9yWRowiPD7 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.dTMPM93RLd ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.vCGsfRhicq +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.dTMPM93RLd +++++ cat /tmp/tmp.vCGsfRhicq +++++ rm /tmp/tmp.dTMPM93RLd /tmp/tmp.vCGsfRhicq +++++ return 0 ++++ [[ false == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.RJgDA2LMX8 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.m3MkWvw3yC +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.RJgDA2LMX8 +++++ cat /tmp/tmp.m3MkWvw3yC +++++ rm /tmp/tmp.RJgDA2LMX8 /tmp/tmp.m3MkWvw3yC +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vIQ9aZXysb +++ mktemp ++ local LAST_ERR=/tmp/tmp.Kj2cvMT4bu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vIQ9aZXysb ++ cat /tmp/tmp.Kj2cvMT4bu ++ rm /tmp/tmp.vIQ9aZXysb /tmp/tmp.Kj2cvMT4bu ++ return 0 + [[ 2 == \2 ]] + echo + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.4 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql + run_mysql 'SHOW 
TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NNsqcUr2cI +++ mktemp ++ local LAST_ERR=/tmp/tmp.5KCEcgU9wK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NNsqcUr2cI ++ cat /tmp/tmp.5KCEcgU9wK ++ rm /tmp/tmp.NNsqcUr2cI /tmp/tmp.5KCEcgU9wK ++ return 0 + client_pod=pxc-client-6bbff654db-bvlfn + wait_pod pxc-client-6bbff654db-bvlfn + local pod=pxc-client-6bbff654db-bvlfn + local max_retry=480 + local ns= ++ echo pxc-client-6bbff654db-bvlfn ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6bbff654db-bvlfn condition met waiting for pod/pxc-client-6bbff654db-bvlfn to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.KUxcwQMinG/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.KUxcwQMinG/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql /tmp/tmp.KUxcwQMinG/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Tokm5JnFcl ++ mktemp + local LAST_ERR=/tmp/tmp.A9jygT1yVQ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Tokm5JnFcl perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.A9jygT1yVQ + rm /tmp/tmp.Tokm5JnFcl /tmp/tmp.A9jygT1yVQ + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8YYpKEwoO8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.iCDlYGyI08 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8YYpKEwoO8 ++ cat /tmp/tmp.iCDlYGyI08 ++ rm /tmp/tmp.8YYpKEwoO8 /tmp/tmp.iCDlYGyI08 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
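# --- editor's note -----------------------------------------------------------------------------
# 'change secret name' switches the cluster to a different Secret object by patching
# spec.secretsName. Sketch of the patch issued above, taken verbatim from the trace:
kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}'
# Changing secretsName makes the operator reconcile every system user from the new Secret, which is
# why the cluster drops back to 'initializing' and the readiness poll running here takes a while.
# -----------------------------------------------------------------------------------------------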
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ItyD7SxyyM +++ mktemp ++ local LAST_ERR=/tmp/tmp.KtkaUkdQgo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ItyD7SxyyM ++ cat /tmp/tmp.KtkaUkdQgo ++ rm /tmp/tmp.ItyD7SxyyM /tmp/tmp.KtkaUkdQgo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2ra86LOBkG +++ mktemp ++ local LAST_ERR=/tmp/tmp.wgjXJHfwcg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2ra86LOBkG ++ cat /tmp/tmp.wgjXJHfwcg ++ rm /tmp/tmp.2ra86LOBkG /tmp/tmp.wgjXJHfwcg ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UmzUFfCFdr +++ mktemp ++ local LAST_ERR=/tmp/tmp.aZ3Fk9CHz3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UmzUFfCFdr ++ cat /tmp/tmp.aZ3Fk9CHz3 ++ rm /tmp/tmp.UmzUFfCFdr /tmp/tmp.aZ3Fk9CHz3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3oPIvKUsDp +++ mktemp ++ local LAST_ERR=/tmp/tmp.Hw3S51t19T ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3oPIvKUsDp ++ cat /tmp/tmp.Hw3S51t19T ++ rm /tmp/tmp.3oPIvKUsDp /tmp/tmp.Hw3S51t19T ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uPg6WqwO5C +++ mktemp ++ local LAST_ERR=/tmp/tmp.aaQVSYPVx4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uPg6WqwO5C ++ cat /tmp/tmp.aaQVSYPVx4 ++ rm /tmp/tmp.uPg6WqwO5C /tmp/tmp.aaQVSYPVx4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ter89VjhTe +++ mktemp ++ local LAST_ERR=/tmp/tmp.mrgCTBAm2h ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ter89VjhTe ++ cat /tmp/tmp.mrgCTBAm2h ++ rm /tmp/tmp.ter89VjhTe /tmp/tmp.mrgCTBAm2h ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
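# --- editor's note -----------------------------------------------------------------------------
# The SHOW TABLES probes above and below are issued by run_mysql, which finds the test's client pod
# by label, waits for it, and execs the mysql client inside it. Condensed sketch built from the
# commands visible in the trace; the exec invocation itself is hidden by 'set +o xtrace', so the
# last line is only an assumption about its shape:
client_pod=$(kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}')
kubectl wait --for=condition=Ready "pod/${client_pod}"    # the helper allows up to 480 retries
kubectl exec "$client_pod" -- \
  mysql -h some-name-proxysql -uoperator -p'test-password' -e 'SHOW TABLES;'
# No container is named on the exec, so kubectl defaults to pxc-client, which is why the log prints
# 'Defaulted container "pxc-client" out of: pxc-client, backup'.
# -----------------------------------------------------------------------------------------------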
.+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6xEYFXu6G3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.rph8If3cis ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6xEYFXu6G3 ++ cat /tmp/tmp.rph8If3cis ++ rm /tmp/tmp.6xEYFXu6G3 /tmp/tmp.rph8If3cis ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bNfuwgkm2C +++ mktemp ++ local LAST_ERR=/tmp/tmp.FqevB1EgML ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bNfuwgkm2C ++ cat /tmp/tmp.FqevB1EgML ++ rm /tmp/tmp.bNfuwgkm2C /tmp/tmp.FqevB1EgML ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ePTVDTETFo +++ mktemp ++ local LAST_ERR=/tmp/tmp.qFsCLKjjwM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ePTVDTETFo ++ cat /tmp/tmp.qFsCLKjjwM ++ rm /tmp/tmp.ePTVDTETFo /tmp/tmp.qFsCLKjjwM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cWwJIxcPMA +++ mktemp ++ local LAST_ERR=/tmp/tmp.FI8zxoljFZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cWwJIxcPMA ++ cat /tmp/tmp.FI8zxoljFZ ++ rm /tmp/tmp.cWwJIxcPMA /tmp/tmp.FI8zxoljFZ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6ErvqoSAoX +++ mktemp ++ local LAST_ERR=/tmp/tmp.1qWkHn4QKp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6ErvqoSAoX ++ cat /tmp/tmp.1qWkHn4QKp ++ rm /tmp/tmp.6ErvqoSAoX /tmp/tmp.1qWkHn4QKp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ycGFBZ9Q2H +++ mktemp ++ local LAST_ERR=/tmp/tmp.myiGdKziv2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ycGFBZ9Q2H ++ cat /tmp/tmp.myiGdKziv2 ++ rm /tmp/tmp.ycGFBZ9Q2H /tmp/tmp.myiGdKziv2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
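# --- editor's note -----------------------------------------------------------------------------
# The repeated blocks above and below are iterations of wait_cluster_consistency: every 5 seconds
# the test reads .status.state and gives up after 300 attempts. Condensed sketch of the loop,
# assuming kubectl access to the pxc custom resource (field paths copied from the log):
i=0
max=300
until [[ "$(kubectl get pxc some-name -o 'jsonpath={.status.state}')" == "ready" ]]; do
  i=$((i + 1))
  [[ $i -ge $max ]] && { echo "cluster did not become ready in time" >&2; break; }
  sleep 5
done
# Once the state is 'ready', the helper additionally checks .status.pxc.ready and the active
# proxy's ready count against the expected sizes (3 PXC nodes and 2 ProxySQL pods at this point).
# -----------------------------------------------------------------------------------------------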
.+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8LacpiZoHc +++ mktemp ++ local LAST_ERR=/tmp/tmp.6Gg0kV6BbY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8LacpiZoHc ++ cat /tmp/tmp.6Gg0kV6BbY ++ rm /tmp/tmp.8LacpiZoHc /tmp/tmp.6Gg0kV6BbY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mcVcEVKBfk +++ mktemp ++ local LAST_ERR=/tmp/tmp.EjBzI10VUE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mcVcEVKBfk ++ cat /tmp/tmp.EjBzI10VUE ++ rm /tmp/tmp.mcVcEVKBfk /tmp/tmp.EjBzI10VUE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CqRv2OwdX5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.sTraZSxGQp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CqRv2OwdX5 ++ cat /tmp/tmp.sTraZSxGQp ++ rm /tmp/tmp.CqRv2OwdX5 /tmp/tmp.sTraZSxGQp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oLvtVeeiFT +++ mktemp ++ local LAST_ERR=/tmp/tmp.W0gJwTI0Rz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oLvtVeeiFT ++ cat /tmp/tmp.W0gJwTI0Rz ++ rm /tmp/tmp.oLvtVeeiFT /tmp/tmp.W0gJwTI0Rz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jTEfcEaA5D +++ mktemp ++ local LAST_ERR=/tmp/tmp.2IFhu4y0Hw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jTEfcEaA5D ++ cat /tmp/tmp.2IFhu4y0Hw ++ rm /tmp/tmp.jTEfcEaA5D /tmp/tmp.2IFhu4y0Hw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.co6Ctpmiyq +++ mktemp ++ local LAST_ERR=/tmp/tmp.eL67o7oq35 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.co6Ctpmiyq ++ cat /tmp/tmp.eL67o7oq35 ++ rm /tmp/tmp.co6Ctpmiyq /tmp/tmp.eL67o7oq35 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qHkR2wTvVb +++ mktemp ++ local LAST_ERR=/tmp/tmp.1wV07Tb5MJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qHkR2wTvVb ++ cat /tmp/tmp.1wV07Tb5MJ ++ rm /tmp/tmp.qHkR2wTvVb /tmp/tmp.1wV07Tb5MJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AaUwC40fzD +++ mktemp ++ local LAST_ERR=/tmp/tmp.T0i6wk6u8F ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AaUwC40fzD ++ cat /tmp/tmp.T0i6wk6u8F ++ rm /tmp/tmp.AaUwC40fzD /tmp/tmp.T0i6wk6u8F ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aBzStSUGHC +++ mktemp ++ local LAST_ERR=/tmp/tmp.08T4RbL7PN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aBzStSUGHC ++ cat /tmp/tmp.08T4RbL7PN ++ rm /tmp/tmp.aBzStSUGHC /tmp/tmp.08T4RbL7PN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8uylIOwXgn +++ mktemp ++ local LAST_ERR=/tmp/tmp.sjFhfLS7S6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8uylIOwXgn ++ cat /tmp/tmp.sjFhfLS7S6 ++ rm /tmp/tmp.8uylIOwXgn /tmp/tmp.sjFhfLS7S6 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Il14hEqNCc +++ mktemp ++ local LAST_ERR=/tmp/tmp.WS1NR9oogd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Il14hEqNCc ++ cat /tmp/tmp.WS1NR9oogd ++ rm /tmp/tmp.Il14hEqNCc /tmp/tmp.WS1NR9oogd ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ZVmWsMQkNt ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.JGijAKRyYQ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ZVmWsMQkNt +++++ cat /tmp/tmp.JGijAKRyYQ +++++ rm /tmp/tmp.ZVmWsMQkNt /tmp/tmp.JGijAKRyYQ +++++ return 0 ++++ [[ false == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.nrZxNJW0rV ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.dJKMeoyArD +++++ local exit_status=0 ++++++ seq 0 2 
+++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.nrZxNJW0rV +++++ cat /tmp/tmp.dJKMeoyArD +++++ rm /tmp/tmp.nrZxNJW0rV /tmp/tmp.dJKMeoyArD +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hpPuRfPgbX +++ mktemp ++ local LAST_ERR=/tmp/tmp.SWGceaUBfo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hpPuRfPgbX ++ cat /tmp/tmp.SWGceaUBfo ++ rm /tmp/tmp.hpPuRfPgbX /tmp/tmp.SWGceaUBfo ++ return 0 + [[ 2 == \2 ]] + echo + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.jKv7EfYwh7 ++ mktemp + local LAST_ERR=/tmp/tmp.nsWU1Wz7Fk + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jKv7EfYwh7 secret/my-cluster-secrets-2 patched + cat /tmp/tmp.nsWU1Wz7Fk + rm /tmp/tmp.jKv7EfYwh7 /tmp/tmp.nsWU1Wz7Fk + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zS77YrQsal +++ mktemp ++ local LAST_ERR=/tmp/tmp.isfv2wSuIB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zS77YrQsal ++ cat /tmp/tmp.isfv2wSuIB ++ rm /tmp/tmp.zS77YrQsal /tmp/tmp.isfv2wSuIB ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.T92cxBYNcH +++ mktemp ++ local LAST_ERR=/tmp/tmp.XnDpDWV3YB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.T92cxBYNcH ++ cat /tmp/tmp.XnDpDWV3YB ++ rm /tmp/tmp.T92cxBYNcH /tmp/tmp.XnDpDWV3YB ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local 
target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.vJDGoidBDb ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.87HOMbpCFA +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.vJDGoidBDb +++++ cat /tmp/tmp.87HOMbpCFA +++++ rm /tmp/tmp.vJDGoidBDb /tmp/tmp.87HOMbpCFA +++++ return 0 ++++ [[ false == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.tF7T2e8eLm ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.NBjRt5ie0V +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.tF7T2e8eLm +++++ cat /tmp/tmp.NBjRt5ie0V +++++ rm /tmp/tmp.tF7T2e8eLm /tmp/tmp.NBjRt5ie0V +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VH6ZGPe1dv +++ mktemp ++ local LAST_ERR=/tmp/tmp.tT6AFvb1rz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VH6ZGPe1dv ++ cat /tmp/tmp.tT6AFvb1rz ++ rm /tmp/tmp.VH6ZGPe1dv /tmp/tmp.tT6AFvb1rz ++ return 0 + [[ 2 == \2 ]] + echo + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.4 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qxyOIZ6Vft +++ mktemp ++ local LAST_ERR=/tmp/tmp.QrRZKX7Pjf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qxyOIZ6Vft ++ cat /tmp/tmp.QrRZKX7Pjf ++ rm /tmp/tmp.qxyOIZ6Vft /tmp/tmp.QrRZKX7Pjf ++ return 0 + client_pod=pxc-client-6bbff654db-bvlfn + wait_pod pxc-client-6bbff654db-bvlfn + local pod=pxc-client-6bbff654db-bvlfn + local max_retry=480 + local ns= ++ echo pxc-client-6bbff654db-bvlfn ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6bbff654db-bvlfn condition met waiting for pod/pxc-client-6bbff654db-bvlfn to become 
ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.KUxcwQMinG/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.KUxcwQMinG/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql /tmp/tmp.KUxcwQMinG/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.UC26KUrFrI +++ mktemp ++ local LAST_ERR=/tmp/tmp.WsAiLwk6cW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UC26KUrFrI ++ cat /tmp/tmp.WsAiLwk6cW ++ rm /tmp/tmp.UC26KUrFrI /tmp/tmp.WsAiLwk6cW ++ return 0 + newpass='(lqc7iS4AJ)<6KCqdD' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''(lqc7iS4AJ)<6KCqdD'\'';' '-h some-name-pxc -uroot -p'\''(lqc7iS4AJ)<6KCqdD'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''(lqc7iS4AJ)<6KCqdD'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''(lqc7iS4AJ)<6KCqdD'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vEIdtdVSpZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.yuAVaRrbYq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vEIdtdVSpZ ++ cat /tmp/tmp.yuAVaRrbYq ++ rm /tmp/tmp.vEIdtdVSpZ /tmp/tmp.yuAVaRrbYq ++ return 0 + client_pod=pxc-client-6bbff654db-bvlfn + wait_pod pxc-client-6bbff654db-bvlfn + local pod=pxc-client-6bbff654db-bvlfn + local max_retry=480 + local ns= ++ echo pxc-client-6bbff654db-bvlfn ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6bbff654db-bvlfn condition met waiting for pod/pxc-client-6bbff654db-bvlfn to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 80 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''(lqc7iS4AJ)<6KCqdD'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''(lqc7iS4AJ)<6KCqdD'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.4 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''(lqc7iS4AJ)<6KCqdD'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''(lqc7iS4AJ)<6KCqdD'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 
'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lk1Xg7CQCz +++ mktemp ++ local LAST_ERR=/tmp/tmp.gRKgTqehhz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lk1Xg7CQCz ++ cat /tmp/tmp.gRKgTqehhz ++ rm /tmp/tmp.lk1Xg7CQCz /tmp/tmp.gRKgTqehhz ++ return 0 + client_pod=pxc-client-6bbff654db-bvlfn + wait_pod pxc-client-6bbff654db-bvlfn + local pod=pxc-client-6bbff654db-bvlfn + local max_retry=480 + local ns= ++ echo pxc-client-6bbff654db-bvlfn ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6bbff654db-bvlfn condition met waiting for pod/pxc-client-6bbff654db-bvlfn to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.KUxcwQMinG/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.KUxcwQMinG/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql /tmp/tmp.KUxcwQMinG/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.RBTW3nGGxT +++ mktemp ++ local LAST_ERR=/tmp/tmp.WgH3d6dupY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RBTW3nGGxT ++ cat /tmp/tmp.WgH3d6dupY ++ rm /tmp/tmp.RBTW3nGGxT /tmp/tmp.WgH3d6dupY ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.6Q9ceiSGg6 ++ mktemp + local LAST_ERR=/tmp/tmp.16NHsOjCof + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.6Q9ceiSGg6 secret/my-cluster-secrets-2 configured + cat /tmp/tmp.16NHsOjCof Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
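# --- editor's note -----------------------------------------------------------------------------
# 'check secret without operator' re-applies the test's secrets.yml over my-cluster-secrets-2; per
# the step name, that manifest presumably carries no 'operator' entry. Just before the apply, the
# test read the current operator password from the internal Secret, copied here from the trace:
kubectl get secrets/internal-some-name --template='{{.data.operator}}' | base64 --decode
# That prints test-password2, and after the apply the SHOW TABLES probe below must still succeed
# with the same password, i.e. the operator preserves the credential rather than regenerating it.
# The kubectl warning above only concerns the missing last-applied-configuration annotation and is
# patched automatically.
# -----------------------------------------------------------------------------------------------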
+ rm /tmp/tmp.6Q9ceiSGg6 /tmp/tmp.16NHsOjCof + return 0 + sleep 60 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.4 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql ]] + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zgyoP9Rx2o +++ mktemp ++ local LAST_ERR=/tmp/tmp.Is9v5S6ZII ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zgyoP9Rx2o ++ cat /tmp/tmp.Is9v5S6ZII ++ rm /tmp/tmp.zgyoP9Rx2o /tmp/tmp.Is9v5S6ZII ++ return 0 + client_pod=pxc-client-6bbff654db-bvlfn + wait_pod pxc-client-6bbff654db-bvlfn + local pod=pxc-client-6bbff654db-bvlfn + local max_retry=480 + local ns= ++ echo pxc-client-6bbff654db-bvlfn ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6bbff654db-bvlfn condition met waiting for pod/pxc-client-6bbff654db-bvlfn to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.KUxcwQMinG/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.KUxcwQMinG/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-4-84.sql /tmp/tmp.KUxcwQMinG/select-4.sql + desc 'test enable haproxy and disable proxysql ' + set +o xtrace ----------------------------------------------------------------------------------- test enable haproxy and disable proxysql ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch pxc some-name --type=merge -p '{"spec":{"haproxy":{"enabled":true},"proxysql":{"enabled":false},"secretsName":"my-cluster-secrets"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.5Nl2b3Sgnh ++ mktemp + local LAST_ERR=/tmp/tmp.o8gDzsFj73 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge -p '{"spec":{"haproxy":{"enabled":true},"proxysql":{"enabled":false},"secretsName":"my-cluster-secrets"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5Nl2b3Sgnh perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.o8gDzsFj73 + rm /tmp/tmp.5Nl2b3Sgnh /tmp/tmp.o8gDzsFj73 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2GGRstiwyC +++ mktemp ++ local LAST_ERR=/tmp/tmp.xKSU5cm062 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2GGRstiwyC ++ cat /tmp/tmp.xKSU5cm062 ++ rm /tmp/tmp.2GGRstiwyC /tmp/tmp.xKSU5cm062 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.anvhmfaSWe +++ mktemp ++ local LAST_ERR=/tmp/tmp.ndpwAlBtgP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.anvhmfaSWe ++ cat /tmp/tmp.ndpwAlBtgP ++ rm /tmp/tmp.anvhmfaSWe /tmp/tmp.ndpwAlBtgP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GooHscxJR2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.YQZOBHQ0R9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GooHscxJR2 ++ cat /tmp/tmp.YQZOBHQ0R9 ++ rm /tmp/tmp.GooHscxJR2 /tmp/tmp.YQZOBHQ0R9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
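# --- editor's note -----------------------------------------------------------------------------
# 'test enable haproxy and disable proxysql' swaps the proxy layer in a single merge patch and also
# points the cluster back at the original Secret. Sketch, taken from the trace:
kubectl patch pxc some-name --type=merge \
  -p '{"spec":{"haproxy":{"enabled":true},"proxysql":{"enabled":false},"secretsName":"my-cluster-secrets"}}'
# wait_cluster_consistency is then called with a proxy size of 3, because the HAProxy deployment
# reports three ready pods here, whereas ProxySQL ran with two.
# -----------------------------------------------------------------------------------------------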
.+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2rmd7kvGL3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.D0Hfies1zM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2rmd7kvGL3 ++ cat /tmp/tmp.D0Hfies1zM ++ rm /tmp/tmp.2rmd7kvGL3 /tmp/tmp.D0Hfies1zM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gTtI533G1y +++ mktemp ++ local LAST_ERR=/tmp/tmp.OfStaDSP5A ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gTtI533G1y ++ cat /tmp/tmp.OfStaDSP5A ++ rm /tmp/tmp.gTtI533G1y /tmp/tmp.OfStaDSP5A ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J82sNc3rTv +++ mktemp ++ local LAST_ERR=/tmp/tmp.bbUA7icyO0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.J82sNc3rTv ++ cat /tmp/tmp.bbUA7icyO0 ++ rm /tmp/tmp.J82sNc3rTv /tmp/tmp.bbUA7icyO0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QLEY9WzKwn +++ mktemp ++ local LAST_ERR=/tmp/tmp.IPU66Z8rln ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QLEY9WzKwn ++ cat /tmp/tmp.IPU66Z8rln ++ rm /tmp/tmp.QLEY9WzKwn /tmp/tmp.IPU66Z8rln ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wf62RDTRtg +++ mktemp ++ local LAST_ERR=/tmp/tmp.6La1Vz17KX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wf62RDTRtg ++ cat /tmp/tmp.6La1Vz17KX ++ rm /tmp/tmp.wf62RDTRtg /tmp/tmp.6La1Vz17KX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FX9jXBaZxY +++ mktemp ++ local LAST_ERR=/tmp/tmp.TiIQJAVrbl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FX9jXBaZxY ++ cat /tmp/tmp.TiIQJAVrbl ++ rm /tmp/tmp.FX9jXBaZxY /tmp/tmp.TiIQJAVrbl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
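# --- editor's note -----------------------------------------------------------------------------
# The deeply nested '+++++' blocks in this trace come from get_proxy_engine: the helper inspects
# which proxy is enabled on the custom resource and then checks that proxy's ready count, as seen
# again just below. Condensed sketch of the decision (jsonpath expressions copied from the log):
if [[ "$(kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}')" == "true" ]]; then
  kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}'    # expected 3 from here on
else
  kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}'   # expected 2 earlier in the log
fi
# -----------------------------------------------------------------------------------------------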
.+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SS4HyQGSlt +++ mktemp ++ local LAST_ERR=/tmp/tmp.irMJKWHhS5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SS4HyQGSlt ++ cat /tmp/tmp.irMJKWHhS5 ++ rm /tmp/tmp.SS4HyQGSlt /tmp/tmp.irMJKWHhS5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.v32QHFV2Wk +++ mktemp ++ local LAST_ERR=/tmp/tmp.3qgnl2a1dQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.v32QHFV2Wk ++ cat /tmp/tmp.3qgnl2a1dQ ++ rm /tmp/tmp.v32QHFV2Wk /tmp/tmp.3qgnl2a1dQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.L71sri2vRf +++ mktemp ++ local LAST_ERR=/tmp/tmp.fNe04cMqYd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.L71sri2vRf ++ cat /tmp/tmp.fNe04cMqYd ++ rm /tmp/tmp.L71sri2vRf /tmp/tmp.fNe04cMqYd ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FUVZ7LNow5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.bFZQ6ATBYb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FUVZ7LNow5 ++ cat /tmp/tmp.bFZQ6ATBYb ++ rm /tmp/tmp.FUVZ7LNow5 /tmp/tmp.bFZQ6ATBYb ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.4LOqQCwr3q ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.J9qLbYdUOw +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.4LOqQCwr3q +++++ cat /tmp/tmp.J9qLbYdUOw +++++ rm /tmp/tmp.4LOqQCwr3q /tmp/tmp.J9qLbYdUOw +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.f7CWt83EIK +++ mktemp ++ local LAST_ERR=/tmp/tmp.DBwfQPN0I2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.f7CWt83EIK ++ cat /tmp/tmp.DBwfQPN0I2 ++ rm /tmp/tmp.f7CWt83EIK /tmp/tmp.DBwfQPN0I2 ++ return 0 + [[ 3 == \3 ]] + echo + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local 
cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wrdWY2Dvvc +++ mktemp ++ local LAST_ERR=/tmp/tmp.8XsLqRbhf2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wrdWY2Dvvc ++ cat /tmp/tmp.8XsLqRbhf2 ++ rm /tmp/tmp.wrdWY2Dvvc /tmp/tmp.8XsLqRbhf2 ++ return 0 + current_generation=1 + [[ 1 != \1 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.CeASGUb4Af ++ mktemp + local LAST_ERR=/tmp/tmp.8ubkSA1FfL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.CeASGUb4Af secret/my-cluster-secrets patched + cat /tmp/tmp.8ubkSA1FfL + rm /tmp/tmp.CeASGUb4Af /tmp/tmp.8ubkSA1FfL + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pHTeJuLkRs +++ mktemp ++ local LAST_ERR=/tmp/tmp.270K6PRe5L ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pHTeJuLkRs ++ cat /tmp/tmp.270K6PRe5L ++ rm /tmp/tmp.pHTeJuLkRs /tmp/tmp.270K6PRe5L ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1bFzyuXnoI +++ mktemp ++ local LAST_ERR=/tmp/tmp.l5k4eymT5f ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1bFzyuXnoI ++ cat /tmp/tmp.l5k4eymT5f ++ rm /tmp/tmp.1bFzyuXnoI /tmp/tmp.l5k4eymT5f ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NV5oOvVae1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.alg7DGVYIV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NV5oOvVae1 ++ cat /tmp/tmp.alg7DGVYIV ++ rm /tmp/tmp.NV5oOvVae1 /tmp/tmp.alg7DGVYIV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
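# --- editor's note -----------------------------------------------------------------------------
# Before rotating the monitor password, the test checks that the HAProxy StatefulSet is still at
# generation 1, i.e. the proxy was not needlessly re-deployed by the earlier changes. Sketch of
# both steps, with the commands copied from the trace:
gen=$(kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}')   # 1 here
kubectl patch secret my-cluster-secrets -p '{"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}'
# dGVzdC1wYXNzd29yZDI= is base64 for test-password2. After the cluster settles, SHOW DATABASES is
# run through some-name-haproxy as -umonitor with the new password, and the generation is checked
# again later in this log and must still equal 1.
# -----------------------------------------------------------------------------------------------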
.+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QwUhFCKDj2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.QKmfT9WCH6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QwUhFCKDj2 ++ cat /tmp/tmp.QKmfT9WCH6 ++ rm /tmp/tmp.QwUhFCKDj2 /tmp/tmp.QKmfT9WCH6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lfjAjkTaNR +++ mktemp ++ local LAST_ERR=/tmp/tmp.t1GFUSWoPN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lfjAjkTaNR ++ cat /tmp/tmp.t1GFUSWoPN ++ rm /tmp/tmp.lfjAjkTaNR /tmp/tmp.t1GFUSWoPN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6m2G7jDtLU +++ mktemp ++ local LAST_ERR=/tmp/tmp.wAdEEqw8pt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6m2G7jDtLU ++ cat /tmp/tmp.wAdEEqw8pt ++ rm /tmp/tmp.6m2G7jDtLU /tmp/tmp.wAdEEqw8pt ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.piM2GZL7Jm +++ mktemp ++ local LAST_ERR=/tmp/tmp.tJZHHqWIQl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.piM2GZL7Jm ++ cat /tmp/tmp.tJZHHqWIQl ++ rm /tmp/tmp.piM2GZL7Jm /tmp/tmp.tJZHHqWIQl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iPhD4QhSHG +++ mktemp ++ local LAST_ERR=/tmp/tmp.0xwn9zTysW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iPhD4QhSHG ++ cat /tmp/tmp.0xwn9zTysW ++ rm /tmp/tmp.iPhD4QhSHG /tmp/tmp.0xwn9zTysW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ttMXFnIYlH +++ mktemp ++ local LAST_ERR=/tmp/tmp.DXTVJB9JDh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ttMXFnIYlH ++ cat /tmp/tmp.DXTVJB9JDh ++ rm /tmp/tmp.ttMXFnIYlH /tmp/tmp.DXTVJB9JDh ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fqwJvzRunz +++ mktemp ++ local LAST_ERR=/tmp/tmp.CVPOSTEuTs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fqwJvzRunz ++ cat /tmp/tmp.CVPOSTEuTs ++ rm /tmp/tmp.fqwJvzRunz /tmp/tmp.CVPOSTEuTs ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QHWBXLERJx +++ mktemp ++ local LAST_ERR=/tmp/tmp.tnABygiTRZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QHWBXLERJx ++ cat /tmp/tmp.tnABygiTRZ ++ rm /tmp/tmp.QHWBXLERJx /tmp/tmp.tnABygiTRZ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AJd6cJ4QWI +++ mktemp ++ local LAST_ERR=/tmp/tmp.jFZBBVcjEY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AJd6cJ4QWI ++ cat /tmp/tmp.jFZBBVcjEY ++ rm /tmp/tmp.AJd6cJ4QWI /tmp/tmp.jFZBBVcjEY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Wh5Xo8X1JT +++ mktemp ++ local LAST_ERR=/tmp/tmp.05JMfRpYvQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Wh5Xo8X1JT ++ cat /tmp/tmp.05JMfRpYvQ ++ rm /tmp/tmp.Wh5Xo8X1JT /tmp/tmp.05JMfRpYvQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6WTnyxlWHG +++ mktemp ++ local LAST_ERR=/tmp/tmp.DXUfhMdb6i ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6WTnyxlWHG ++ cat /tmp/tmp.DXUfhMdb6i ++ rm /tmp/tmp.6WTnyxlWHG /tmp/tmp.DXUfhMdb6i ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HGL8zs0wLO +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZVOjvkNHEC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HGL8zs0wLO ++ cat /tmp/tmp.ZVOjvkNHEC ++ rm /tmp/tmp.HGL8zs0wLO /tmp/tmp.ZVOjvkNHEC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
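# --- editor's note -----------------------------------------------------------------------------
# Nearly every command in this log runs through the suite's kubectl_bin wrapper, which is why each
# call is surrounded by mktemp/cat/rm noise. Condensed, assumption-level sketch of that pattern as
# it appears in the trace (up to three attempts, stdout and stderr captured to temp files); the
# function name below is hypothetical, chosen only for illustration:
kubectl_bin_sketch() {
  local LAST_OUT LAST_ERR exit_status=0
  LAST_OUT=$(mktemp); LAST_ERR=$(mktemp)
  for i in $(seq 0 2); do
    set +e
    kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
    exit_status=$?
    set -e
    [ "$exit_status" != 0 ] || break    # keep retrying only while the call fails
  done
  cat "$LAST_OUT"; cat "$LAST_ERR"
  rm "$LAST_OUT" "$LAST_ERR"
  return "$exit_status"
}
# -----------------------------------------------------------------------------------------------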
.+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pwVDOVrfik +++ mktemp ++ local LAST_ERR=/tmp/tmp.1MocmGedLq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pwVDOVrfik ++ cat /tmp/tmp.1MocmGedLq ++ rm /tmp/tmp.pwVDOVrfik /tmp/tmp.1MocmGedLq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8Xpio1qjAU +++ mktemp ++ local LAST_ERR=/tmp/tmp.ntdqw6hfJN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8Xpio1qjAU ++ cat /tmp/tmp.ntdqw6hfJN ++ rm /tmp/tmp.8Xpio1qjAU /tmp/tmp.ntdqw6hfJN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wpCTSKPO9V +++ mktemp ++ local LAST_ERR=/tmp/tmp.vMFazSHJTR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wpCTSKPO9V ++ cat /tmp/tmp.vMFazSHJTR ++ rm /tmp/tmp.wpCTSKPO9V /tmp/tmp.vMFazSHJTR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MaOtX65SCE +++ mktemp ++ local LAST_ERR=/tmp/tmp.cSroYOcbM2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MaOtX65SCE ++ cat /tmp/tmp.cSroYOcbM2 ++ rm /tmp/tmp.MaOtX65SCE /tmp/tmp.cSroYOcbM2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.a0tcyzUMEb +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZEszRm33jF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.a0tcyzUMEb ++ cat /tmp/tmp.ZEszRm33jF ++ rm /tmp/tmp.a0tcyzUMEb /tmp/tmp.ZEszRm33jF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UVNZBGI23L +++ mktemp ++ local LAST_ERR=/tmp/tmp.oatKf70EQS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UVNZBGI23L ++ cat /tmp/tmp.oatKf70EQS ++ rm /tmp/tmp.UVNZBGI23L /tmp/tmp.oatKf70EQS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
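# --- editor's note -----------------------------------------------------------------------------
# The compare_mysql_cmd steps in this log pick a version-specific expected file when one exists:
# select-4 above is diffed against select-4-84.sql because the image tag matches 8.4 and that file
# is present, while select-3 just below falls back to the plain select-3.sql. Condensed sketch of
# the selection; $pxc_image, the short paths and /tmp/result.sql are stand-ins for illustration:
expected=compare/select-4.sql
if [[ "$pxc_image" =~ 8\.4 ]] && [[ -f "${expected%.sql}-84.sql" ]]; then
  expected="${expected%.sql}-84.sql"
fi
diff -u "$expected" /tmp/result.sql    # the test fails if the captured query output differs
# -----------------------------------------------------------------------------------------------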
.+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.f4tfpTYLwS +++ mktemp ++ local LAST_ERR=/tmp/tmp.qiCjMcckVo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.f4tfpTYLwS ++ cat /tmp/tmp.qiCjMcckVo ++ rm /tmp/tmp.f4tfpTYLwS /tmp/tmp.qiCjMcckVo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DKyUuHUyYS +++ mktemp ++ local LAST_ERR=/tmp/tmp.oeJw0x1PPc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DKyUuHUyYS ++ cat /tmp/tmp.oeJw0x1PPc ++ rm /tmp/tmp.DKyUuHUyYS /tmp/tmp.oeJw0x1PPc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 22 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uwuRbe898r +++ mktemp ++ local LAST_ERR=/tmp/tmp.ityp5ePDeB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uwuRbe898r ++ cat /tmp/tmp.ityp5ePDeB ++ rm /tmp/tmp.uwuRbe898r /tmp/tmp.ityp5ePDeB ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.D9GEHAEntT +++ mktemp ++ local LAST_ERR=/tmp/tmp.lMfyWLTYLk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.D9GEHAEntT ++ cat /tmp/tmp.lMfyWLTYLk ++ rm /tmp/tmp.D9GEHAEntT /tmp/tmp.lMfyWLTYLk ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.JoJwu7i5eV ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.hvC7v1TIzp +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.JoJwu7i5eV +++++ cat /tmp/tmp.hvC7v1TIzp +++++ rm /tmp/tmp.JoJwu7i5eV /tmp/tmp.hvC7v1TIzp +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TK11l1JiUE +++ mktemp ++ local LAST_ERR=/tmp/tmp.HGFY9UdK8s ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TK11l1JiUE ++ cat /tmp/tmp.HGFY9UdK8s ++ rm /tmp/tmp.TK11l1JiUE /tmp/tmp.HGFY9UdK8s ++ return 0 + [[ 3 == \3 ]] + echo + sleep 15 + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW 
DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.4 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-3-84.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 5\.7 ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ERyyKnvGCE +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZWHfACrYNx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ERyyKnvGCE ++ cat /tmp/tmp.ZWHfACrYNx ++ rm /tmp/tmp.ERyyKnvGCE /tmp/tmp.ZWHfACrYNx ++ return 0 + client_pod=pxc-client-6bbff654db-bvlfn + wait_pod pxc-client-6bbff654db-bvlfn + local pod=pxc-client-6bbff654db-bvlfn + local max_retry=480 + local ns= ++ echo pxc-client-6bbff654db-bvlfn ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6bbff654db-bvlfn condition met waiting for pod/pxc-client-6bbff654db-bvlfn to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.KUxcwQMinG/select-3.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.KUxcwQMinG/select-3.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/users-scheduler/compare/select-3.sql /tmp/tmp.KUxcwQMinG/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.4 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mQpJZEjP2I +++ mktemp ++ local LAST_ERR=/tmp/tmp.knDiVjQc3B ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mQpJZEjP2I ++ cat /tmp/tmp.knDiVjQc3B ++ rm /tmp/tmp.mQpJZEjP2I /tmp/tmp.knDiVjQc3B ++ return 0 + current_generation=1 + [[ 1 != \1 ]] + destroy users-scheduler-16507 + local namespace=users-scheduler-16507 + local ignore_logs=true + [[ 0 == 1 ]] + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' ++ get_operator_pod + grep -v 'the object has been modified' ++ local label_prefix=app.kubernetes.io/ + grep -v 'get backup status: Job.batch' + sort -u + tee /tmp/tmp.KUxcwQMinG/operator.log + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + grep -v level=info +++ grep -c percona-xtradb-cluster-operator +++ 
kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator ++ jq -r '.items[] | select(.metadata.deletionTimestamp == null) | .metadata.name' ++ head -1 +++ mktemp ++ local LAST_OUT=/tmp/tmp.GRJBnHuR5w +++ mktemp ++ local LAST_ERR=/tmp/tmp.DHiCnNAoPP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GRJBnHuR5w ++ cat /tmp/tmp.DHiCnNAoPP ++ rm /tmp/tmp.GRJBnHuR5w /tmp/tmp.DHiCnNAoPP ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-65b6f89d5d-8skh7 ++ mktemp + local LAST_OUT=/tmp/tmp.BV4ENHteDT ++ mktemp + local LAST_ERR=/tmp/tmp.VSw4eqEY0V + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-65b6f89d5d-8skh7 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BV4ENHteDT + cat /tmp/tmp.VSw4eqEY0V + rm /tmp/tmp.BV4ENHteDT /tmp/tmp.VSw4eqEY0V + return 0 } }, }, { }, }, { }, }, ""), }, { }, - }, - { - }, + }, + "05b97784fcfd3030a92bc9ee83fc4d73c12e29f1eaaf63d27fdb407ed25", ... // 16 identical fields ... // 16 identical fields 2025-12-29T15:57:57.654Z INFO setup Feature gates {"PXCO_FEATURE_GATES": "", "enabled": ""} 2025-12-29T15:57:57.654Z INFO setup Manager starting up {"gitCommit": "8293f0719903b3ac6d61887112eb49702bd931a3", "gitBranch": "PR-2330-8293f071", "buildTime": "2025-12-29T12:59:03Z", "goVersion": "go1.25.5", "os": "linux", "arch": "amd64"} 2025-12-29T15:57:57.654Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.14-gke.1156000"} 2025-12-29T15:57:57.657Z INFO setup Registering Components. 2025-12-29T15:57:58.038Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2025-12-29T15:57:58.039Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-12-29T15:57:58.039Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-12-29T15:57:58.039Z INFO controller-runtime.metrics Starting metrics server 2025-12-29T15:57:58.039Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-12-29T15:57:58.039Z INFO controller-runtime.webhook Starting webhook server 2025-12-29T15:57:58.039Z INFO setup Starting the Cmd. 2025-12-29T15:57:58.039Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-12-29T15:57:58.040Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2025-12-29T15:57:58.140Z INFO Attempting to acquire leader lease... 
{"lock": "pxc-operator/08db1feb.percona.com"} 2025-12-29T15:57:58.178Z DEBUG events percona-xtradb-cluster-operator-65b6f89d5d-8skh7_61062646-263f-464b-999c-a112b19ee213 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"b0e104b4-7ed3-4fbc-a0df-d6883a4be79d","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1767023878172367009"}, "reason": "LeaderElection"} 2025-12-29T15:57:58.178Z INFO Starting EventSource {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-12-29T15:57:58.178Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-12-29T15:57:58.178Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.Secret"} 2025-12-29T15:57:58.178Z INFO Starting EventSource {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-12-29T15:57:58.178Z INFO Successfully acquired lease {"lock": "pxc-operator/08db1feb.percona.com"} 2025-12-29T15:57:58.279Z INFO Starting Controller {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup"} 2025-12-29T15:57:58.279Z INFO Starting Controller {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster"} 2025-12-29T15:57:58.279Z INFO Starting Controller {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore"} 2025-12-29T15:57:58.279Z INFO Starting workers {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "worker count": 1} 2025-12-29T15:57:58.279Z INFO Starting workers {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "worker count": 1} 2025-12-29T15:57:58.279Z INFO Starting workers {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "worker count": 1} 2025-12-29T15:58:35.711Z INFO Set CR version {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "3bf26f87-1e38-496e-9135-761417965fed", "version": "1.19.0"} 2025-12-29T15:58:35.823Z INFO User secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "3bf26f87-1e38-496e-9135-761417965fed", "secrets": "my-cluster-secrets"} 2025-12-29T15:58:36.045Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", 
"name": "some-name", "reconcileID": "3bf26f87-1e38-496e-9135-761417965fed", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-12-29T15:58:36.166Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "3bf26f87-1e38-496e-9135-761417965fed", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-12-29T15:58:36.198Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "3bf26f87-1e38-496e-9135-761417965fed", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-12-29T15:58:36.226Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "3bf26f87-1e38-496e-9135-761417965fed", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-29T15:58:36.265Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "3bf26f87-1e38-496e-9135-761417965fed", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-29T15:58:36.299Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "3bf26f87-1e38-496e-9135-761417965fed", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-29T15:58:36.392Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "3bf26f87-1e38-496e-9135-761417965fed", "object": "some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-29T15:58:37.126Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "e838ee7c-0b94-4317-abb2-409dfb3cb815", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-12-29T15:58:37.150Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": 
"some-name", "reconcileID": "e838ee7c-0b94-4317-abb2-409dfb3cb815", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-12-29T15:59:47.698Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "cfe631b8-511f-46c8-9414-e598464536c9", "user": "operator"} 2025-12-29T15:59:47.724Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "cfe631b8-511f-46c8-9414-e598464536c9", "user": "monitor"} 2025-12-29T15:59:47.759Z INFO User monitor: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "cfe631b8-511f-46c8-9414-e598464536c9"} 2025-12-29T15:59:47.791Z INFO monitor user privileges granted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "cfe631b8-511f-46c8-9414-e598464536c9"} 2025-12-29T15:59:47.826Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "cfe631b8-511f-46c8-9414-e598464536c9", "user": "xtrabackup"} 2025-12-29T15:59:47.854Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "cfe631b8-511f-46c8-9414-e598464536c9"} 2025-12-29T15:59:47.876Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "cfe631b8-511f-46c8-9414-e598464536c9", "user": "replication"} 2025-12-29T15:59:47.885Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "cfe631b8-511f-46c8-9414-e598464536c9", "err": "get primary pxc pod: not found"} 2025-12-29T15:59:52.665Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": 
"users-scheduler-16507", "name": "some-name", "reconcileID": "d5fbe5dc-2e61-421e-9651-0889c6eca2a4", "err": "get primary pxc pod: not found"} 2025-12-29T15:59:57.747Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "b0032954-de17-4ed4-98e4-abbe96d233ea", "err": "get primary pxc pod: not found"} 2025-12-29T16:02:17.183Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "93760445-00f1-42d6-a6aa-c527a359619a", "user": "root"} 2025-12-29T16:02:17.259Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "93760445-00f1-42d6-a6aa-c527a359619a", "new version": "8.4.6-6.1"} 2025-12-29T16:02:19.654Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "93760445-00f1-42d6-a6aa-c527a359619a"} 2025-12-29T16:02:24.528Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "07930d94-3491-4014-95e8-65b9de512208"} 2025-12-29T16:02:29.752Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "1f9f4d83-f92e-490e-8a29-60ac6440c9c9"} 2025-12-29T16:02:34.929Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "060f3347-4d3c-48a0-9105-e70b7e99a5a3"} 2025-12-29T16:02:40.132Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "b9679561-9c60-4078-a1ec-51ba3464db8e"} 2025-12-29T16:02:45.523Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": 
"16be9aa6-eea8-482c-8b70-502eed096200"} 2025-12-29T16:02:50.741Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "03ea4b3e-eafb-44d3-9853-1bec5dec88fa"} 2025-12-29T16:02:55.943Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "1862d8a5-4d40-4ca0-a10c-cfedda6d5fd0"} 2025-12-29T16:03:01.116Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4527f67c-3c55-47b7-84c3-08dbd9075503"} 2025-12-29T16:03:06.441Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "db0e977a-0066-426c-9c6b-e8935c3db555"} 2025-12-29T16:03:11.501Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "e45f60e2-5aeb-41bd-9cdd-42109ec48439"} 2025-12-29T16:03:17.055Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "49c4d995-2405-4dd3-9dd4-68f4648198eb"} 2025-12-29T16:03:21.898Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "869ce7f0-7800-4391-a431-be35eee741a7"} 2025-12-29T16:03:27.305Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "d3e571e2-e49b-4283-a65b-64e3ca33a8e3"} 2025-12-29T16:03:32.446Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "b5ccb200-4bc5-4718-83f6-54f5238dbc5c"} 2025-12-29T16:03:37.624Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", 
"PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "7dc23763-7677-4fbe-915b-111fc7e63be8"} 2025-12-29T16:03:42.922Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4de4a6db-643c-4f4d-9d99-a3c7b511255c"} 2025-12-29T16:03:48.454Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "e5b52d9c-7470-4a6d-bb9c-67680df9f9db"} 2025-12-29T16:03:53.798Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "0c24a13b-cf4f-4775-89c2-f249967185a9"} 2025-12-29T16:03:56.921Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "e41e8ea9-cc37-468d-a46b-87a773f143c0", "user": "root"} 2025-12-29T16:03:56.939Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "e41e8ea9-cc37-468d-a46b-87a773f143c0", "user": "root"} 2025-12-29T16:03:56.959Z INFO MySQL init secret created {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "e41e8ea9-cc37-468d-a46b-87a773f143c0", "secret": "some-name-mysql-init", "user": "root"} 2025-12-29T16:04:02.234Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "e41e8ea9-cc37-468d-a46b-87a773f143c0"} 2025-12-29T16:04:02.258Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "e41e8ea9-cc37-468d-a46b-87a773f143c0", "user": "root"} 2025-12-29T16:04:02.272Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", 
"reconcileID": "e41e8ea9-cc37-468d-a46b-87a773f143c0", "user": "root"} 2025-12-29T16:04:02.298Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "0775b154-4a39-4a1b-8ea3-b3fba3920648"} 2025-12-29T16:04:04.833Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "5d81994e-2c45-4f8c-b1ee-b69c554b42e0"} 2025-12-29T16:04:09.823Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "63cd7f11-746c-4856-8a94-b1a51db75745"} 2025-12-29T16:04:14.933Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "aabf60d6-ae6b-46f8-ac45-38a74c952419"} 2025-12-29T16:04:20.334Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4fd86486-5954-46c9-8af0-05636a72810b"} 2025-12-29T16:04:25.496Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "8e8246f5-bf57-4c4b-8860-74cd6ddb2bce"} 2025-12-29T16:04:30.793Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4312fb3d-9104-4b63-93b7-e920aedb85a9"} 2025-12-29T16:04:35.951Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "b4d96f36-ca7e-4768-82ab-68d9658a2f89"} 2025-12-29T16:04:41.028Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "5dc2ebf9-d45d-450c-b752-abb9bb5a5a44"} 2025-12-29T16:04:43.419Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": 
"PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4fa515c7-35a5-432d-bed8-08384a2e6ff1", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-29T16:04:43.459Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4fa515c7-35a5-432d-bed8-08384a2e6ff1", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-29T16:04:47.057Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4fa515c7-35a5-432d-bed8-08384a2e6ff1", "error": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-29T16:05:06.848Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4fcf941f-a4c4-4ec7-96b1-fc30020e1dee", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in 
ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-29T16:05:09.095Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "bff38615-1053-4cdf-b855-2b21094c5cbd", "user": "proxyadmin"} 2025-12-29T16:05:09.095Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "bff38615-1053-4cdf-b855-2b21094c5cbd", "user": "proxyadmin"} 2025-12-29T16:05:09.126Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "bff38615-1053-4cdf-b855-2b21094c5cbd", "user": "proxyadmin"} 2025-12-29T16:05:09.145Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "bff38615-1053-4cdf-b855-2b21094c5cbd", "user": "proxyadmin"} 2025-12-29T16:05:09.145Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "bff38615-1053-4cdf-b855-2b21094c5cbd", "last-applied-secret": "e30569abb03afe7b343dba72199d7c580b3e8d6cc191f0955de1ecd8b3bfe531"} 2025-12-29T16:05:09.149Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "bff38615-1053-4cdf-b855-2b21094c5cbd", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-29T16:05:12.419Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "bff38615-1053-4cdf-b855-2b21094c5cbd", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 
'proxyadmin'@'10.177.64.56' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.56' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.56' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.56' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.56' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.56' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.56' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.56' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.56' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.56' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-29T16:05:47.206Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "78917e6f-c26e-47cd-b06d-eccd00fb8046"} 2025-12-29T16:05:51.998Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "065d1321-65da-42c7-be5b-a89d6c4d4d18"} 2025-12-29T16:05:57.471Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "5518e514-4ac9-44c4-bcd6-abcd97ec37d3"} 2025-12-29T16:06:02.696Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "162a08be-1cf4-4857-88f4-f0af0d6b2b8d"} 2025-12-29T16:06:07.951Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "34e45a4d-71f9-45e5-b631-7983b2a924d4"} 2025-12-29T16:06:08.747Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "9abc0bf6-5765-4d9e-9635-b2f0642066cc", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-29T16:06:08.786Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "9abc0bf6-5765-4d9e-9635-b2f0642066cc", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 
2025-12-29T16:06:10.308Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "0416e9ca-9a04-4226-aa98-b43c6b6f121b", "user": "xtrabackup"} 2025-12-29T16:06:10.319Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "0416e9ca-9a04-4226-aa98-b43c6b6f121b", "user": "xtrabackup"} 2025-12-29T16:06:10.337Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "0416e9ca-9a04-4226-aa98-b43c6b6f121b", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-12-29T16:06:10.360Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "0416e9ca-9a04-4226-aa98-b43c6b6f121b", "user": "xtrabackup"} 2025-12-29T16:06:10.369Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "0416e9ca-9a04-4226-aa98-b43c6b6f121b", "user": "xtrabackup"} 2025-12-29T16:06:10.374Z INFO PXC pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "0416e9ca-9a04-4226-aa98-b43c6b6f121b", "last-applied-secret": "3f72af9d1d1c746384c555b3e45ddd6e24430f3773cbcfb9fa65c68e66bba7e4"} 2025-12-29T16:06:10.377Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "0416e9ca-9a04-4226-aa98-b43c6b6f121b", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-29T16:06:11.422Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "9abc0bf6-5765-4d9e-9635-b2f0642066cc"} 2025-12-29T16:07:12.020Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": 
"users-scheduler-16507", "name": "some-name", "reconcileID": "2fbe7552-a8cb-4d9f-addf-264be16f5fe2", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-scheduler-16507 on 34.118.224.10:53: no such host"} 2025-12-29T16:07:17.631Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "07fba43c-ebde-4cac-826e-a508129694fd", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: invalid connection"} 2025-12-29T16:08:03.902Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "a267d100-2d07-4252-8e6d-0e7e36b507da", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-scheduler-16507 on 34.118.224.10:53: no such host"} 2025-12-29T16:08:09.105Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "54408e0a-917e-4149-8bdb-4114b9dc9531", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-scheduler-16507 on 34.118.224.10:53: no such host"} 2025-12-29T16:08:14.232Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "a8c34738-ac7b-4eb3-94e6-b917fc5d3cba", "primary name": "some-name-pxc-0.some-name-pxc.users-scheduler-16507.svc.cluster.local"} 2025-12-29T16:08:19.346Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ab91de6d-0fea-4d64-b1aa-ef1570148da5", "primary name": "some-name-pxc-0.some-name-pxc.users-scheduler-16507.svc.cluster.local"} 2025-12-29T16:08:24.441Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "20859368-06f6-4087-9de0-308a3d2d12b6", "primary name": "some-name-pxc-0.some-name-pxc.users-scheduler-16507.svc.cluster.local"} 2025-12-29T16:08:29.557Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "d995f9d1-0501-4b62-a7d3-de4d6d2f8d44", "primary name": "some-name-pxc-0.some-name-pxc.users-scheduler-16507.svc.cluster.local"} 2025-12-29T16:08:34.643Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "9ce58b9e-8b46-4c0f-84bb-d4c37f538090", "primary name": "some-name-pxc-0.some-name-pxc.users-scheduler-16507.svc.cluster.local"} 2025-12-29T16:08:39.750Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "9f0cf773-3a12-4409-a999-783ca46103f2", "primary name": "some-name-pxc-0.some-name-pxc.users-scheduler-16507.svc.cluster.local"} 2025-12-29T16:08:49.272Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "13c9c962-cd4f-42dc-a425-6dc46868ee32"} 2025-12-29T16:08:52.870Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "2baa4db8-5471-4cc3-a2ec-f84cf005b10b"} 2025-12-29T16:08:58.211Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "500c97a1-b41c-4eed-a048-26f1db638b57"} 2025-12-29T16:09:03.477Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "65a03d3a-9a6f-45f8-8d31-5aa88a8d6149"} 2025-12-29T16:09:05.333Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "0467b5af-198c-4887-b9f4-d3f086efec0d", "user": "monitor"} 2025-12-29T16:09:05.343Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "0467b5af-198c-4887-b9f4-d3f086efec0d", "user": "monitor"} 2025-12-29T16:09:05.373Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "0467b5af-198c-4887-b9f4-d3f086efec0d", "secret": "some-name-mysql-init", "user": "monitor"} 2025-12-29T16:09:05.393Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "0467b5af-198c-4887-b9f4-d3f086efec0d", "user": "monitor"} 2025-12-29T16:09:05.419Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "0467b5af-198c-4887-b9f4-d3f086efec0d", "user": "monitor"} 2025-12-29T16:09:05.715Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "0467b5af-198c-4887-b9f4-d3f086efec0d", "last-applied-secret": "f5bb0d6739cd4d11a3d9cb47c95ad84e11ef10b9e0ddccb47f92c0ba999a536a"} 2025-12-29T16:09:05.719Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "0467b5af-198c-4887-b9f4-d3f086efec0d", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-29T16:09:06.307Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "b6dc8bdf-6233-4b0f-82b8-0e1d6ddbbf57", "user": "monitor"} 2025-12-29T16:09:08.421Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "0467b5af-198c-4887-b9f4-d3f086efec0d", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / 
\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-29T16:09:42.275Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "10b47650-0b2b-4bac-b372-48f8b0463a01", "user": "monitor"} 2025-12-29T16:09:44.997Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "10b47650-0b2b-4bac-b372-48f8b0463a01"} 2025-12-29T16:09:58.717Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "fe31ee38-0a31-47c0-a84b-25a85c71ad7b", "user": "monitor"} 2025-12-29T16:09:59.574Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "fe31ee38-0a31-47c0-a84b-25a85c71ad7b", "user": "monitor"} 2025-12-29T16:09:59.580Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "fe31ee38-0a31-47c0-a84b-25a85c71ad7b", "last-applied-secret": "f5bb0d6739cd4d11a3d9cb47c95ad84e11ef10b9e0ddccb47f92c0ba999a536a"} 2025-12-29T16:10:01.837Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "fe31ee38-0a31-47c0-a84b-25a85c71ad7b"} 2025-12-29T16:10:06.206Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "f90e4429-29e4-49cc-a81d-2d9f353d464b"} 2025-12-29T16:10:11.397Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": 
"users-scheduler-16507", "name": "some-name", "reconcileID": "c4e1a903-9aee-4e91-8179-e2dd376a4eba"} 2025-12-29T16:10:16.349Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "6ba9ccc1-9210-4458-b7b8-1662dc44df53"} 2025-12-29T16:10:21.355Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ad38a05f-a4af-4368-89b4-f2a20e8b87ba"} 2025-12-29T16:10:27.101Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "d062fb0d-d04a-41dd-af28-2de044990015"} 2025-12-29T16:10:32.321Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "5048ff8e-fc7f-4cce-adf9-dbe30c550d52"} 2025-12-29T16:10:36.728Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4899840e-257f-42e9-be39-51bbcb953a3d", "user": "operator"} 2025-12-29T16:10:36.739Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4899840e-257f-42e9-be39-51bbcb953a3d", "user": "operator"} 2025-12-29T16:10:36.758Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4899840e-257f-42e9-be39-51bbcb953a3d", "secret": "some-name-mysql-init", "user": "operator"} 2025-12-29T16:10:36.777Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4899840e-257f-42e9-be39-51bbcb953a3d", "user": "operator"} 2025-12-29T16:10:36.786Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4899840e-257f-42e9-be39-51bbcb953a3d", "user": "operator"} 
2025-12-29T16:10:36.801Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4899840e-257f-42e9-be39-51bbcb953a3d", "last-applied-secret": "5c0a8e06a24dc535b3e4bfd26dfdf1b25dcec2b9c6e76efeb692a26e0c66ddf3"} 2025-12-29T16:10:36.810Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4899840e-257f-42e9-be39-51bbcb953a3d", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-29T16:10:38.057Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "0a1e4d42-7722-4560-b6b9-2357bb8cbc74", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-29T16:11:13.132Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "78a62149-80e9-41d3-8432-3ccec74bbc53"} 2025-12-29T16:11:17.542Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "eec34de4-a11a-4b37-9cec-ee487cc2c139"} 2025-12-29T16:11:22.955Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "03d52def-e638-419c-ad88-ec44ee1de22e"} 2025-12-29T16:11:28.327Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": 
"e2311b50-0a92-4660-a7d1-c1d0a135a0d8"} 2025-12-29T16:11:33.454Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "e363f5b0-6929-460c-a4b5-83013f4caa26"} 2025-12-29T16:11:35.881Z INFO Created user secrets {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "secrets": "my-cluster-secrets-2"} 2025-12-29T16:11:35.885Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "user": "root"} 2025-12-29T16:11:35.903Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "user": "root"} 2025-12-29T16:11:35.916Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "secret": "some-name-mysql-init", "user": "root"} 2025-12-29T16:11:38.902Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8"} 2025-12-29T16:11:38.923Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "user": "root"} 2025-12-29T16:11:38.936Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "user": "root"} 2025-12-29T16:11:38.939Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "user": "operator"} 2025-12-29T16:11:38.951Z INFO Password updated 
{"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "user": "operator"} 2025-12-29T16:11:38.963Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "secret": "some-name-mysql-init", "user": "operator"} 2025-12-29T16:11:38.978Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "user": "operator"} 2025-12-29T16:11:38.987Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "user": "operator"} 2025-12-29T16:11:38.993Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "user": "monitor"} 2025-12-29T16:11:39.006Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "user": "monitor"} 2025-12-29T16:11:39.026Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "secret": "some-name-mysql-init", "user": "monitor"} 2025-12-29T16:11:39.044Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "user": "monitor"} 2025-12-29T16:11:39.064Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "user": "monitor"} 2025-12-29T16:11:39.351Z INFO Password changed, updating user {"controller": "pxc-controller", 
"controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "user": "xtrabackup"} 2025-12-29T16:11:39.361Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "user": "xtrabackup"} 2025-12-29T16:11:39.374Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-12-29T16:11:39.386Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "user": "xtrabackup"} 2025-12-29T16:11:39.393Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "user": "xtrabackup"} 2025-12-29T16:11:39.397Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "user": "replication"} 2025-12-29T16:11:39.406Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "user": "replication"} 2025-12-29T16:11:39.421Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "secret": "some-name-mysql-init", "user": "replication"} 2025-12-29T16:11:39.450Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "user": "replication"} 2025-12-29T16:11:39.458Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": 
"pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "user": "replication"} 2025-12-29T16:11:39.458Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "user": "proxyadmin"} 2025-12-29T16:11:39.475Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "user": "proxyadmin"} 2025-12-29T16:11:39.490Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "user": "proxyadmin"} 2025-12-29T16:11:39.490Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "last-applied-secret": "abeee594bb570c53bf6e910ae5dd099f95e919e243a1b024e85a23225c301596"} 2025-12-29T16:11:39.490Z INFO PXC pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "last-applied-secret": "abeee594bb570c53bf6e910ae5dd099f95e919e243a1b024e85a23225c301596"} 2025-12-29T16:11:39.493Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-29T16:11:39.535Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-29T16:11:42.897Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ff5114cd-4851-4108-b4fa-48a0c36abcb8", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.61' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.61' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.61' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.61' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.61' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.61' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.61' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.61' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.61' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.61' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-29T16:12:42.882Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "108c4ccf-298f-4cae-b25f-5de4c293f2fe", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: invalid connection"} 2025-12-29T16:13:19.197Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "bc435bce-9fdf-41b8-9f1a-16bad64bcd92", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-scheduler-16507 on 34.118.224.10:53: no such host"} 2025-12-29T16:13:19.455Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "0de2f268-6da2-4ebc-a9a3-7cb544f9b9b3", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-scheduler-16507 on 34.118.224.10:53: no such host"} 2025-12-29T16:13:24.394Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "96907598-6240-4322-ba23-c2262e61db1c", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-scheduler-16507 on 34.118.224.10:53: no such host"} 2025-12-29T16:13:29.601Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "018824de-633c-46bf-bc09-8668632b6f86", "err": "failed to connect to pod 
some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-scheduler-16507 on 34.118.224.10:53: no such host"} 2025-12-29T16:13:34.759Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "0fdedb49-883f-4795-b513-16a83b1964b9", "primary name": "some-name-pxc-0.some-name-pxc.users-scheduler-16507.svc.cluster.local"} 2025-12-29T16:13:39.867Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "f4228aad-17f2-42e1-834d-d6aba9db82a1", "primary name": "some-name-pxc-0.some-name-pxc.users-scheduler-16507.svc.cluster.local"} 2025-12-29T16:13:44.959Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "9e724ca8-54bb-4d7a-830d-cbb22c1391ad", "primary name": "some-name-pxc-0.some-name-pxc.users-scheduler-16507.svc.cluster.local"} 2025-12-29T16:13:50.054Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "8b683fb7-2cad-49ce-b71e-75da8d3c94ee", "primary name": "some-name-pxc-0.some-name-pxc.users-scheduler-16507.svc.cluster.local"} 2025-12-29T16:13:55.141Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "c673e20d-cb2d-4106-a11f-477dbf884927", "primary name": "some-name-pxc-0.some-name-pxc.users-scheduler-16507.svc.cluster.local"} 2025-12-29T16:14:00.232Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "f27a0037-a60e-46ce-84f6-9b3dfc04d1ec", "primary name": "some-name-pxc-0.some-name-pxc.users-scheduler-16507.svc.cluster.local"} 2025-12-29T16:14:06.067Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "e4fd5a06-7b85-44f9-ba67-7bfa7584a0ff", "user": "monitor"} 2025-12-29T16:14:06.885Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "e4fd5a06-7b85-44f9-ba67-7bfa7584a0ff", "user": "monitor"} 2025-12-29T16:14:06.891Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "e4fd5a06-7b85-44f9-ba67-7bfa7584a0ff", "last-applied-secret": "abeee594bb570c53bf6e910ae5dd099f95e919e243a1b024e85a23225c301596"} 2025-12-29T16:14:09.208Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "c5509e1e-01aa-4ced-bf1e-9705ca137358", "user": "operator"} 2025-12-29T16:14:09.218Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "c5509e1e-01aa-4ced-bf1e-9705ca137358", "user": "operator"} 2025-12-29T16:14:09.240Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "c5509e1e-01aa-4ced-bf1e-9705ca137358", "secret": "some-name-mysql-init", "user": "operator"} 2025-12-29T16:14:09.264Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "c5509e1e-01aa-4ced-bf1e-9705ca137358", "user": "operator"} 2025-12-29T16:14:09.281Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", 
"reconcileID": "c5509e1e-01aa-4ced-bf1e-9705ca137358", "user": "operator"} 2025-12-29T16:14:09.294Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "c5509e1e-01aa-4ced-bf1e-9705ca137358", "last-applied-secret": "b95e0a9edbebb3cb46be4e5302c626f4fc277097539ee2c3958bf185aaf8f935"} 2025-12-29T16:14:09.298Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "c5509e1e-01aa-4ced-bf1e-9705ca137358", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-29T16:14:10.509Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "e4fd5a06-7b85-44f9-ba67-7bfa7584a0ff", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-29T16:14:15.886Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "8421edf7-5887-43ac-ab7f-46d20edbd17c", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR 
(line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR (line:572) : 
Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-29T16:14:50.657Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "6a0555ee-74fe-4614-bf42-da73397977fd"} 2025-12-29T16:14:55.886Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "7539c719-9eb4-435f-bb1a-0e2603cc60e4"} 2025-12-29T16:15:00.970Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "c00ec840-670f-4262-b63c-09ec55301b85"} 2025-12-29T16:15:06.790Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "aa430908-68ca-49d3-b558-190e77023569"} 2025-12-29T16:15:11.644Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "193ff0c6-4e49-4741-9906-718992212abd"} 2025-12-29T16:15:17.041Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "531e5642-8150-4896-8351-4df12510e926"} 2025-12-29T16:15:21.773Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": 
"users-scheduler-16507", "name": "some-name", "reconcileID": "cf4ddd50-7a3e-4f55-a474-2abdf6d541a6"} 2025-12-29T16:15:27.274Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "510890b8-6727-410d-b5bf-10fcd5648924"} 2025-12-29T16:15:32.377Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "3e2a3b42-91f8-4d7b-bd06-8d5c8981dcdb"} 2025-12-29T16:15:37.681Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "3489e076-2977-40f8-9ef0-8ead899cda57"} 2025-12-29T16:15:42.938Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "98611eec-a5d3-4e5b-8697-6f3e26fe4281"} 2025-12-29T16:15:48.447Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ecd0007f-229e-4430-aa3b-9a23d89e85cd"} 2025-12-29T16:15:53.563Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "e9f2a776-f5ab-41f8-a129-414ae39cc917"} 2025-12-29T16:15:59.062Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "0595acdb-8516-4824-8db4-87ce1be4f018"} 2025-12-29T16:16:04.036Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "0107f088-f6eb-4905-ba8f-dbdcaedaa791"} 2025-12-29T16:16:09.460Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "d468dc1c-79f3-4d04-b23c-9ca42fb74a27"} 2025-12-29T16:16:14.641Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": 
"pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "c6a3dbca-54bb-41ca-a789-ba2fb6a8ad9c"} 2025-12-29T16:16:19.835Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "eebf65a0-6ba5-4290-8926-7f4c2279e1b1"} 2025-12-29T16:16:24.983Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "02e068f3-33ef-4e78-a938-e1e8aeecc411"} 2025-12-29T16:16:26.595Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "f23609e1-f44a-4e51-b851-752f2eadc971", "user": "root"} 2025-12-29T16:16:26.613Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "f23609e1-f44a-4e51-b851-752f2eadc971", "user": "root"} 2025-12-29T16:16:26.629Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "f23609e1-f44a-4e51-b851-752f2eadc971", "secret": "some-name-mysql-init", "user": "root"} 2025-12-29T16:16:29.580Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "f23609e1-f44a-4e51-b851-752f2eadc971"} 2025-12-29T16:16:29.606Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "f23609e1-f44a-4e51-b851-752f2eadc971", "user": "root"} 2025-12-29T16:16:29.619Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "f23609e1-f44a-4e51-b851-752f2eadc971", "user": "root"} 2025-12-29T16:16:29.625Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "f23609e1-f44a-4e51-b851-752f2eadc971", "user": "monitor"} 2025-12-29T16:16:29.636Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "f23609e1-f44a-4e51-b851-752f2eadc971", "user": "monitor"} 2025-12-29T16:16:29.655Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "f23609e1-f44a-4e51-b851-752f2eadc971", "secret": "some-name-mysql-init", "user": "monitor"} 2025-12-29T16:16:29.673Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "f23609e1-f44a-4e51-b851-752f2eadc971", "user": "monitor"} 2025-12-29T16:16:29.688Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "f23609e1-f44a-4e51-b851-752f2eadc971", "user": "monitor"} 2025-12-29T16:16:29.955Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "f23609e1-f44a-4e51-b851-752f2eadc971", "user": "xtrabackup"} 2025-12-29T16:16:29.965Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "f23609e1-f44a-4e51-b851-752f2eadc971", "user": "xtrabackup"} 2025-12-29T16:16:29.984Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "f23609e1-f44a-4e51-b851-752f2eadc971", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-12-29T16:16:30.015Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "f23609e1-f44a-4e51-b851-752f2eadc971", "user": "xtrabackup"} 2025-12-29T16:16:30.024Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "f23609e1-f44a-4e51-b851-752f2eadc971", "user": "xtrabackup"} 2025-12-29T16:16:30.027Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "f23609e1-f44a-4e51-b851-752f2eadc971", "user": "proxyadmin"} 2025-12-29T16:16:30.045Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "f23609e1-f44a-4e51-b851-752f2eadc971", "user": "proxyadmin"} 2025-12-29T16:16:30.059Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "f23609e1-f44a-4e51-b851-752f2eadc971", "user": "proxyadmin"} 2025-12-29T16:16:30.059Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "f23609e1-f44a-4e51-b851-752f2eadc971", "last-applied-secret": "ab6905b97784fcfd3030a92bc9ee83fc4d73c12e29f1eaaf63d27fdb407ed255"} 2025-12-29T16:16:30.059Z INFO PXC pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "f23609e1-f44a-4e51-b851-752f2eadc971", "last-applied-secret": "ab6905b97784fcfd3030a92bc9ee83fc4d73c12e29f1eaaf63d27fdb407ed255"} 2025-12-29T16:16:30.062Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "f23609e1-f44a-4e51-b851-752f2eadc971", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-29T16:16:30.109Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "f23609e1-f44a-4e51-b851-752f2eadc971", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-29T16:16:33.654Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": 
"f23609e1-f44a-4e51-b851-752f2eadc971", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.64' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.64' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.64' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.64' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.64' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.64' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.64' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.64' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.64' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'10.177.64.64' (using password: YES)\nERROR (line:586) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at some-name-proxysql-0.some-name-proxysql-unready.users-scheduler-16507.svc.cluster.local:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-29T16:17:30.026Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "17b514c3-4893-493a-be20-010350426622", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-scheduler-16507 on 34.118.224.10:53: no such host"} 2025-12-29T16:17:31.098Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4f356cd5-9365-43e2-ac5b-3baa0ec546de", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-12-29T16:17:31.122Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4f356cd5-9365-43e2-ac5b-3baa0ec546de", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-12-29T16:17:31.171Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4f356cd5-9365-43e2-ac5b-3baa0ec546de", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-29T16:17:31.289Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4f356cd5-9365-43e2-ac5b-3baa0ec546de", "object": "some-name-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-29T16:17:34.619Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", 
"controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4f356cd5-9365-43e2-ac5b-3baa0ec546de", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.235.68:3306: connect: connection refused"} 2025-12-29T16:17:34.758Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "504ef412-0b7b-4446-98be-3057f4d028fd", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-12-29T16:17:37.784Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "504ef412-0b7b-4446-98be-3057f4d028fd", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.235.68:3306: connect: connection refused"} 2025-12-29T16:17:40.958Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "ba2d1af1-85a1-4493-95a1-9ca2343ee623", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.235.68:3306: connect: connection refused"} 2025-12-29T16:17:45.927Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "8649819d-954a-40ce-802b-9ce5740647a9", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.235.68:3306: connect: connection refused"} 2025-12-29T16:17:54.202Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "75e98f74-d8c2-489b-9fd6-745521df91dc", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.235.68:3306: connect: connection refused"} 2025-12-29T16:18:14.783Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "076fb85f-ddf6-430a-b7fe-67a00d892e97", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp 10.177.66.87:33062: connect: connection refused"} 2025-12-29T16:18:51.256Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": 
"users-scheduler-16507", "name": "some-name", "reconcileID": "4ef473ae-25f5-46dd-8212-ea24f7e9dad5", "user": "root"} 2025-12-29T16:18:51.275Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4ef473ae-25f5-46dd-8212-ea24f7e9dad5", "user": "root"} 2025-12-29T16:18:51.297Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4ef473ae-25f5-46dd-8212-ea24f7e9dad5", "secret": "some-name-mysql-init", "user": "root"} 2025-12-29T16:18:51.319Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4ef473ae-25f5-46dd-8212-ea24f7e9dad5", "user": "root"} 2025-12-29T16:18:51.331Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4ef473ae-25f5-46dd-8212-ea24f7e9dad5", "user": "root"} 2025-12-29T16:18:51.334Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4ef473ae-25f5-46dd-8212-ea24f7e9dad5", "user": "operator"} 2025-12-29T16:18:51.344Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4ef473ae-25f5-46dd-8212-ea24f7e9dad5", "user": "operator"} 2025-12-29T16:18:51.366Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4ef473ae-25f5-46dd-8212-ea24f7e9dad5", "secret": "some-name-mysql-init", "user": "operator"} 2025-12-29T16:18:51.387Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4ef473ae-25f5-46dd-8212-ea24f7e9dad5", "user": "operator"} 2025-12-29T16:18:51.397Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": 
"4ef473ae-25f5-46dd-8212-ea24f7e9dad5", "user": "operator"} 2025-12-29T16:18:51.402Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4ef473ae-25f5-46dd-8212-ea24f7e9dad5", "user": "monitor"} 2025-12-29T16:18:51.413Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4ef473ae-25f5-46dd-8212-ea24f7e9dad5", "user": "monitor"} 2025-12-29T16:18:51.434Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4ef473ae-25f5-46dd-8212-ea24f7e9dad5", "secret": "some-name-mysql-init", "user": "monitor"} 2025-12-29T16:18:51.448Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4ef473ae-25f5-46dd-8212-ea24f7e9dad5", "user": "monitor"} 2025-12-29T16:18:51.712Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4ef473ae-25f5-46dd-8212-ea24f7e9dad5", "user": "xtrabackup"} 2025-12-29T16:18:51.722Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4ef473ae-25f5-46dd-8212-ea24f7e9dad5", "user": "xtrabackup"} 2025-12-29T16:18:51.742Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4ef473ae-25f5-46dd-8212-ea24f7e9dad5", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-12-29T16:18:51.765Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4ef473ae-25f5-46dd-8212-ea24f7e9dad5", "user": "xtrabackup"} 2025-12-29T16:18:51.773Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": 
"4ef473ae-25f5-46dd-8212-ea24f7e9dad5", "user": "xtrabackup"} 2025-12-29T16:18:51.777Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4ef473ae-25f5-46dd-8212-ea24f7e9dad5", "user": "replication"} 2025-12-29T16:18:51.786Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4ef473ae-25f5-46dd-8212-ea24f7e9dad5", "user": "replication"} 2025-12-29T16:18:51.804Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4ef473ae-25f5-46dd-8212-ea24f7e9dad5", "secret": "some-name-mysql-init", "user": "replication"} 2025-12-29T16:18:51.821Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4ef473ae-25f5-46dd-8212-ea24f7e9dad5", "user": "replication"} 2025-12-29T16:18:51.829Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4ef473ae-25f5-46dd-8212-ea24f7e9dad5", "user": "replication"} 2025-12-29T16:18:51.829Z INFO PXC pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4ef473ae-25f5-46dd-8212-ea24f7e9dad5", "last-applied-secret": "5c0a8e06a24dc535b3e4bfd26dfdf1b25dcec2b9c6e76efeb692a26e0c66ddf3"} 2025-12-29T16:18:51.831Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "4ef473ae-25f5-46dd-8212-ea24f7e9dad5", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-29T16:18:55.007Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "9a87e079-1c84-434d-92ca-f0d7b7188294", "user": "monitor"} 2025-12-29T16:18:55.017Z INFO Password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "9a87e079-1c84-434d-92ca-f0d7b7188294", "user": "monitor"} 2025-12-29T16:18:55.052Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "9a87e079-1c84-434d-92ca-f0d7b7188294", "secret": "some-name-mysql-init", "user": "monitor"} 2025-12-29T16:18:55.076Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "9a87e079-1c84-434d-92ca-f0d7b7188294", "user": "monitor"} 2025-12-29T16:21:21.135Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "26457a8a-4e7b-4d4d-b4e3-30fc53af399c", "user": "monitor"} 2025-12-29T16:21:21.957Z INFO Old password discarded {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-scheduler-16507"}, "namespace": "users-scheduler-16507", "name": "some-name", "reconcileID": "26457a8a-4e7b-4d4d-b4e3-30fc53af399c", "user": "monitor"} ... // 22 identical fields ... // 2 identical fields - "3f72af9d1d1c746384c555b3e45ddd6e24430f3773cbcfb9fa65c68e66bba7e4", ... // 3 identical fields ... // 3 identical fields ... // 3 identical fields ... // 4 identical fields "5", - "5c0a8e06a24dc535b3e4bfd26dfdf1b25dcec2b9c6e76efeb692a26e0c66ddf3", + "5c0a8e06a24dc535b3e4bfd26dfdf1b25dcec2b9c6e76efeb692a26e0c66ddf3", - "5e0a9edbebb3cb46be4e5302c626f4fc277097539ee2c3958bf185aaf8f93", ... // 5 identical fields + "6", + "6905b97784fcfd3030a92bc9ee83fc4d73c12e29f1eaaf63d27fdb407ed255", ... // 6 identical fields ... // 7 identical fields ... // 8 identical fields "9", ... // 9 identical fields ... 
// 9 identical fields + "a", "ab", - "ab6905b97784fcfd3030a92bc9ee83fc4d73c12e29f1eaaf63d27fdb407ed255", - "abeee594bb570c53bf6e910ae5dd099f95e919e243a1b024e85a23225c301596", + "abeee594bb570c53bf6e910ae5dd099f95e919e243a1b024e85a23225c301596", AccessModes: nil, ActiveDeadlineSeconds: nil, Affinity: nil, Annotations: map[string]string{ - Annotations: map[string]string{ + Annotations: map[string]string{ - APIVersion: "apps/v1", - APIVersion: "apps/v1", Args: {"mysqld"}, Args: {"proxysql", "-f", "-c", "/etc/proxysql/proxysql.cnf", ...}, AutomountServiceAccountToken: nil, + AvailableReplicas: 0, - AvailableReplicas: 2, - AvailableReplicas: 3, AWSElasticBlockStore: nil, AzureFile: nil, "b", + "b95e0a9edbebb3cb46be4e5302c626f4fc277097539ee2c3958bf185aaf8f935", Capacity: nil, - CollisionCount: &0, + CollisionCount: nil, Conditions: nil, ConfigMap: &v1.ConfigMapVolumeSource{ ContainerPort: 3306, ContainerPort: 33060, ContainerPort: 33062, ContainerPort: 4444, ContainerPort: 4567, ContainerPort: 4568, ContainerPort: 6032, ContainerPort: 6070, Containers: []v1.Container{ + CreationTimestamp: v1.Time{}, - CreationTimestamp: v1.Time{Time: s"2025-12-29 15:58:36 +0000 UTC"}, + CurrentReplicas: 0, - CurrentReplicas: 2, - CurrentReplicas: 3, + CurrentRevision: "", - CurrentRevision: "some-name-proxysql-5cc5cfc8d9", - CurrentRevision: "some-name-proxysql-6b584878d6", - CurrentRevision: "some-name-proxysql-7c554644", - CurrentRevision: "some-name-proxysql-848958879f", - CurrentRevision: "some-name-proxysql-9986688b9", - CurrentRevision: "some-name-proxysql-f5bd96649", - CurrentRevision: "some-name-pxc-65c75c44d6", - CurrentRevision: "some-name-pxc-6ccf64f994", - CurrentRevision: "some-name-pxc-b94497b7", - CurrentRevision: "some-name-pxc-d6f997bd6", DataSource: nil, DataSourceRef: nil, - DefaultMode: &420, - DefaultMode: &420, + DefaultMode: nil, + DefaultMode: nil, DeletionGracePeriodSeconds: nil, DeletionGracePeriodSeconds: nil, DeletionTimestamp: nil, + DeprecatedServiceAccount: "", - DeprecatedServiceAccount: "default", + DNSPolicy: "", - DNSPolicy: "ClusterFirst", - "e30569abb03afe7b343dba72199d7c580b3e8d6cc191f0955de1ecd8b3bfe531", - "eee594bb570c53bf6e910ae5dd099f95e919e243a1b024e85a23225c301596", EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-proxysql"}, Optional: &true}}}, EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-pxc"}, Optional: &true}}}, Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "OPERATOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "proxyadmin"}}}, ...}, Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc-unready"}, {Name: "MONITOR_HOST", Value: "%"}, {Name: "MYSQL_ROOT_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "root"}}}, {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, ...}, EphemeralContainers: nil, - "f5bb0d6739cd4d11a3d9cb47c95ad84e11ef10b9e0ddccb47f92c0ba999a536a", + "f5bb0d6739cd4d11a3d9cb47c95ad84e11ef10b9e0ddccb47f92c0ba999a536a", FailureThreshold: 3, FC: nil, - FieldsType: "FieldsV1", - FieldsType: "FieldsV1", - FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`..., - FieldsV1: 
s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`..., Finalizers: nil, Finalizers: nil, + Generation: 0, - Generation: 1, - Generation: 2, - Generation: 3, - Generation: 4, - Generation: 5, - Generation: 6, - Generation: 7, - Generation: 8, github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 GitRepo: nil, /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:857 HostAliases: nil, HostIP: "", HostPort: 0, ImagePullPolicy: "Always", InitContainers: []v1.Container{ InitialDelaySeconds: 300, ISCSI: nil, Items: nil, Items: nil, "kubectl.kubernetes.io/default-container": "proxysql", "kubectl.kubernetes.io/default-container": "pxc", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Labels: nil, + "last-applied-secret": "3f72af9d1d1c746384c555b3e45ddd6e24430f3773cbcfb9fa65c68e66bba7e4", + "last-applied-secret": "e30569abb03afe7b343dba72199d7c580b3e8d6cc191f0955de1ecd8b3bfe531", "last-applied-secret": strings.Join({ Lifecycle: nil, LivenessProbe: &v1.Probe{ LocalObjectReference: {Name: "auto-some-name-pxc"}, LocalObjectReference: {Name: "some-name-pxc"}, ManagedFields: nil, + ManagedFields: nil, - ManagedFields: []v1.ManagedFieldsEntry{ - Manager: "kube-controller-manager", - Manager: "percona-xtradb-cluster-operator", MinReadySeconds: 0, [mysql] 2025/12/29 16:07:17 packets.go:58 read tcp 10.177.66.79:52130->10.177.64.57:33062: read: connection reset by peer [mysql] 2025/12/29 16:12:42 packets.go:58 read tcp 10.177.66.79:58304->10.177.64.59:33062: read: connection reset by peer Name: "auto-config", {Name: "bin", VolumeSource: {EmptyDir: &{}}}, Name: "config", Name: "ist", Name: "mysql", Name: "mysql-admin", Name: "mysql-init-file", Name: "mysql-users-secret-file", Name: "mysqlx", Name: "proxyadm", Namespace: "users-scheduler-16507", Name: "ssl", Name: "ssl-internal", Name: "sst", Name: "stats", {Name: "tmp", VolumeSource: {EmptyDir: &{}}}, Name: "vault-keyring-secret", Name: "write-set", NFS: nil, NodeName: "", NodeSelector: nil, ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "last-applied-secret": "e30569abb03afe7b343dba72199d7c580b3e8d6cc191f0955de1ecd8b3bfe531", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}}, ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}}, ObjectMeta: {Name: "datadir", Labels: 
{"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, ObjectMeta: {Name: "proxydata", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, ObjectMeta: v1.ObjectMeta{ ObjectMeta: v1.ObjectMeta{ + ObservedGeneration: 0, - ObservedGeneration: 1, - ObservedGeneration: 2, - ObservedGeneration: 3, - ObservedGeneration: 4, - ObservedGeneration: 5, - ObservedGeneration: 6, - ObservedGeneration: 7, - ObservedGeneration: 8, - Operation: "Update", - Operation: "Update", Optional: &false, Optional: &true, Optional: &true, Ordinals: nil, OS: nil, Overhead: nil, OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name", UID: "6227f932-f476-4077-a02b-099ff687f2e3", ...}}, OwnerReferences: nil, "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNWMwYThlMDZhMjRkYzUzNWIzZTRiZmQyNmRmZGYxYjI1ZGNlYzJiOWM2ZTc2ZWZlYjY5MmEyNmUwYzY2ZGRmMyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNWMwYThlMDZhMjRkYzUzNWIzZTRiZmQyNmRmZGYxYjI1ZGNlYzJiOWM2ZTc2ZWZlYjY5MmEyNmUwYzY2ZGRmMyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYjk1ZTBhOWVkYmViYjNjYjQ2YmU0ZTUzMDJjNjI2ZjRmYzI3NzA5NzUzOWVlMmMzOTU4YmYxODVhYWY4ZjkzNSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYjk1ZTBhOWVkYmViYjNjYjQ2YmU0ZTUzMDJjNjI2ZjRmYzI3NzA5NzUzOWVlMmMzOTU4YmYxODVhYWY4ZjkzNSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYWI2OTA1Yjk3Nzg0ZmNmZDMwMzBhOTJiYzllZTgzZmM0ZDczYzEyZTI5ZjFlYWFmNjNkMjdmZGI0MDdlZDI1NSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYWJlZWU1OTRiYjU3MGM1M2JmNmU5MTBhZTVkZDA5OWY5NWU5MTllMjQzYTFiMDI0ZTg1YTIzMjI1YzMwMTU5NiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYWJlZWU1OTRiYjU3MGM1M2JmNmU5MTBhZTVkZDA5OWY5NWU5MTllMjQzYTFiMDI0ZTg1YTIzMjI1YzMwMTU5NiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZjViYjBkNjczOWNkNGQxMWEzZDljYjQ3Yzk1YWQ4NGUxMWVmMTBiOWUwZGRjY2I0N2Y5MmMwYmE5OTlhNTM2YSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZjViYjBkNjczOWNkNGQxMWEzZDljYjQ3Yzk1YWQ4NGUxMWVmMTBiOWUwZGRjY2I0N2Y5MmMwYmE5OTlhNTM2YSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZTMwNTY5YWJiMDNhZmU3YjM0M2RiYTcyMTk5ZDdjNTgwYjNlOGQ2Y2MxOTFmMDk1NWRlMWVjZDhiM2JmZTUzMSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiM2Y3MmFmOWQxZDFjNzQ2Mzg0YzU1NWIzZTQ1ZGRkNmUyNDQzMGYzNzczY2JjZmI5ZmE2NWM2OGU2NmJiYTdlNCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiM2Y3MmFmOWQxZDFjNzQ2Mzg0YzU1NWIzZTQ1ZGRkNmUyNDQzMGYzNzczY2JjZmI5ZmE2NWM2OGU2NmJiYTdlNCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNWMwYThlMDZhMjRkYzUzNWIzZTRiZmQyNmRmZGYxYjI1ZGNlYzJiOWM2ZTc2ZWZlYjY5MmEyNmUwYzY2ZGRmMyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYWI2OTA1Yjk3Nzg0ZmNmZDMwMzBhOTJiYzllZTgzZmM0ZDczYzEyZTI5ZjFlYWFmNjNkMjdmZGI0MDdlZDI1NSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYWI2OTA1Yjk3Nzg0ZmNmZDMwMzBhOTJiYzllZTgzZmM0ZDczYzEyZTI5ZjFlYWFmNjNkMjdmZGI0MDdlZDI1NSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYWJlZWU1OTRiYjU3MGM1M2JmNmU5MTBhZTVkZDA5OWY5NWU5MTllMjQzYTFiMDI0ZTg1YTIzMjI1YzMwMTU5NiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYWJlZWU1OTRiYjU3MGM1M2JmNmU5MTBhZTVkZDA5OWY5NWU5MTllMjQzYTFiMDI0ZTg1YTIzMjI1YzMwMTU5NiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNv"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZTMwNTY5YWJiMDNhZmU3YjM0M2RiYTcyMTk5ZDdjNTgwYjNlOGQ2Y2MxOTFmMDk1NWRlMWVjZDhiM2JmZTUzMSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMw"..., "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc", + PeriodSeconds: 0, - PeriodSeconds: 10, + PersistentVolumeClaimRetentionPolicy: nil, - PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", + Phase: "", - Phase: "Pending", + PodManagementPolicy: "", - PodManagementPolicy: "OrderedReady", Ports: []v1.ContainerPort{ PreemptionPolicy: nil, ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}}, + Protocol: "", - Protocol: "TCP", Quobyte: nil, ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...}, + ReadyReplicas: 0, - ReadyReplicas: 2, - ReadyReplicas: 3, + Replicas: 0, Replicas: &2, - Replicas: 2, - Replicas: &2, + Replicas: &2, Replicas: &3, - Replicas: 3, - Replicas: &3, + Replicas: &3, + ResourceVersion: "", - ResourceVersion: "1767023954527455020", - ResourceVersion: "1767024131500335014", - ResourceVersion: "1767024299045471020", - ResourceVersion: "1767024340472303020", - ResourceVersion: "1767024369474255020", - ResourceVersion: "1767024521539215014", - ResourceVersion: "1767024573829407020", - ResourceVersion: 
"1767024656949279020", - ResourceVersion: "1767024838216783020", - ResourceVersion: "1767024843287263014", - ResourceVersion: "1767024866315551020", - ResourceVersion: "1767025128035743014", + RestartPolicy: "", - RestartPolicy: "Always", - RevisionHistoryLimit: &10, + RevisionHistoryLimit: nil, + SchedulerName: "", - SchedulerName: "default-scheduler", SecretName: "internal-some-name", SecretName: "some-name-mysql-init", SecretName: "some-name-ssl", SecretName: "some-name-ssl-internal", SecretName: "some-name-vault", Secret: &v1.SecretVolumeSource{ SecurityContext: nil, Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, SelfLink: "", ServiceAccountName: "default", ServiceName: "some-name-proxysql-unready", ServiceName: "some-name-pxc", SetHostnameAsFQDN: nil, Spec: v1.PersistentVolumeClaimSpec{ Spec: v1.PodSpec{ Spec: v1.StatefulSetSpec{ StartupProbe: nil, Status: v1.PersistentVolumeClaimStatus{ Status: v1.StatefulSetStatus{ StorageClassName: nil, Subdomain: "", - Subresource: "status", SuccessThreshold: 1, Template: v1.PodTemplateSpec{ TerminationGracePeriodSeconds: &30, TerminationGracePeriodSeconds: &600, TerminationGracePeriodSeconds: nil, + TerminationMessagePath: "", - TerminationMessagePath: "/dev/termination-log", + TerminationMessagePolicy: "", - TerminationMessagePolicy: "File", TimeoutSeconds: 5, - Time: s"2025-12-29 15:58:36 +0000 UTC", - Time: s"2025-12-29 15:59:14 +0000 UTC", - Time: s"2025-12-29 16:02:11 +0000 UTC", - Time: s"2025-12-29 16:04:43 +0000 UTC", - Time: s"2025-12-29 16:04:59 +0000 UTC", - Time: s"2025-12-29 16:05:09 +0000 UTC", - Time: s"2025-12-29 16:05:40 +0000 UTC", - Time: s"2025-12-29 16:06:08 +0000 UTC", - Time: s"2025-12-29 16:06:09 +0000 UTC", - Time: s"2025-12-29 16:06:10 +0000 UTC", - Time: s"2025-12-29 16:08:41 +0000 UTC", - Time: s"2025-12-29 16:09:05 +0000 UTC", - Time: s"2025-12-29 16:09:33 +0000 UTC", - Time: s"2025-12-29 16:10:36 +0000 UTC", - Time: s"2025-12-29 16:10:56 +0000 UTC", - Time: s"2025-12-29 16:11:39 +0000 UTC", - Time: s"2025-12-29 16:13:58 +0000 UTC", - Time: s"2025-12-29 16:14:03 +0000 UTC", - Time: s"2025-12-29 16:14:09 +0000 UTC", - Time: s"2025-12-29 16:14:26 +0000 UTC", - Time: s"2025-12-29 16:16:30 +0000 UTC", - Time: s"2025-12-29 16:18:48 +0000 UTC", Tolerations: nil, - TopologySpreadConstraints: nil, + TopologySpreadConstraints: []v1.TopologySpreadConstraint{}, TypeMeta: {}, TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"}, + UID: "", - UID: "28a6d608-b6de-4141-93e7-ab895fb8cb4a", - UID: "3a453928-4205-4b21-ab28-613821778503", + UpdatedReplicas: 0, - UpdatedReplicas: 2, - UpdatedReplicas: 3, + UpdateRevision: "", - UpdateRevision: "some-name-proxysql-5cc5cfc8d9", - UpdateRevision: "some-name-proxysql-6b584878d6", - UpdateRevision: "some-name-proxysql-7c554644", - UpdateRevision: "some-name-proxysql-848958879f", - UpdateRevision: "some-name-proxysql-9986688b9", - UpdateRevision: "some-name-proxysql-f5bd96649", - UpdateRevision: "some-name-pxc-65c75c44d6", - UpdateRevision: "some-name-pxc-6ccf64f994", - UpdateRevision: "some-name-pxc-b94497b7", - UpdateRevision: "some-name-pxc-d6f997bd6", UpdateStrategy: 
{Type: "RollingUpdate", RollingUpdate: &{Partition: &0}}, &v1.StatefulSet{ VolumeAttributesClassName: nil, VolumeClaimTemplates: []v1.PersistentVolumeClaim{ VolumeDevices: nil, - VolumeMode: &"Filesystem", + VolumeMode: nil, VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...}, VolumeName: "", VolumeSource: v1.VolumeSource{ Volumes: []v1.Volume{ VsphereVolume: nil, WorkingDir: "", + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE + kubectl patch pxc -n users-scheduler-16507 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.EtPz5G5WUQ ++ mktemp + local LAST_ERR=/tmp/tmp.XqsteAa2z9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.EtPz5G5WUQ perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-scheduler-16507 namespace + cat /tmp/tmp.XqsteAa2z9 + rm /tmp/tmp.EtPz5G5WUQ /tmp/tmp.XqsteAa2z9 + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.A0f0nTHIBb ++ mktemp + local LAST_ERR=/tmp/tmp.6V7DzLVobj + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.A0f0nTHIBb No resources found + cat /tmp/tmp.6V7DzLVobj + rm /tmp/tmp.A0f0nTHIBb /tmp/tmp.6V7DzLVobj + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.oApmaKdrCe ++ mktemp + local LAST_ERR=/tmp/tmp.AR3pKssZwU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.oApmaKdrCe No resources found + cat /tmp/tmp.AR3pKssZwU + rm /tmp/tmp.oApmaKdrCe /tmp/tmp.AR3pKssZwU + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.qIrFcrFov5 ++ mktemp + local LAST_ERR=/tmp/tmp.uXm1ar3Qe0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qIrFcrFov5 validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.uXm1ar3Qe0 + rm /tmp/tmp.qIrFcrFov5 /tmp/tmp.uXm1ar3Qe0 + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml + : + '[' '!' 
-z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-scheduler-16507 + rm -rf /tmp/tmp.KUxcwQMinG ++ mktemp + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.TvzumHFg2q + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- ++ mktemp + local LAST_OUT=/tmp/tmp.YDCAPSt84J + local LAST_ERR=/tmp/tmp.OeckvL2EIr + local exit_status=0 ++ mktemp + local LAST_ERR=/tmp/tmp.2uTs0o8moZ + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace users-scheduler-16507 + kubectl delete --grace-period=0 --force=true namespace pxc-operator
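Note: the teardown that closes this log first strips the finalizers from every PerconaXtraDBCluster so the custom resources cannot hang in Terminating, then deletes the pxc, pxc-backup and pxc-restore resources, the percona-xtradbcluster-webhook ValidatingWebhookConfiguration and cert-manager, and finally force-deletes the test namespaces. A condensed sketch of that sequence, assuming cluster-admin access to the same cluster; it mirrors the commands visible in the trace but is not the suite's own teardown helper:

    # drop finalizers so CR deletion cannot block on the operator
    kubectl get pxc --all-namespaces -o wide | grep -v NAMESPACE \
      | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'

    # delete the custom resources and the admission webhook
    kubectl delete pxc --all --all-namespaces
    kubectl delete pxc-backup --all --all-namespaces
    kubectl delete pxc-restore --all --all-namespaces
    kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook

    # remove cert-manager and force-delete the test namespaces
    kubectl delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml
    kubectl delete --grace-period=0 --force=true namespace users-scheduler-16507
    kubectl delete --grace-period=0 --force=true namespace pxc-operator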
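Note: every kubectl_bin call in this trace wraps the underlying kubectl command in the same retry pattern: up to three attempts (seq 0 2), stdout and stderr redirected to mktemp files, and the captured output printed and the files removed once an attempt succeeds. A minimal stand-alone sketch of that pattern in bash; the function name retry_kubectl and the 2-second pause are illustrative, not the helper used by the suite:

    retry_kubectl() {
        local out err rc attempt
        out=$(mktemp)
        err=$(mktemp)
        for attempt in $(seq 0 2); do
            set +e
            kubectl "$@" >"$out" 2>"$err"   # run the real command, tolerating failure
            rc=$?
            set -e
            [ "$rc" -eq 0 ] && break        # stop retrying once an attempt succeeds
            sleep 2                         # illustrative pause between attempts
        done
        cat "$out" "$err"                   # surface the captured output, as the log does
        rm -f "$out" "$err"
        return "$rc"
    }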
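Note: the large block of +/- lines earlier in this log appears to be a field-by-field diff of the pxc and proxysql StatefulSet objects (previous state versus current state); most of the churn is in server-populated fields (ResourceVersion, UID, UpdateRevision, Status counters, timestamps) and in the base64-encoded percona.com/last-config-hash annotation. A rough way to reproduce such a comparison by hand, assuming access to the same cluster and an expected manifest saved as expected-sts.yml (the file name and the filter list are illustrative, not part of the test suite):

    kubectl get statefulset some-name-pxc -n users-scheduler-16507 -o yaml \
      | grep -vE 'resourceVersion:|uid:|creationTimestamp:|generation:' \
      > /tmp/actual-sts.yml
    diff -u expected-sts.yml /tmp/actual-sts.yml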