Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/logs/users-5-7.log Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 + create_infra users-12977 + local ns=users-12977 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-17422 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.MNL4vJ7kJK ++ mktemp + local LAST_ERR=/tmp/tmp.gQIc3MZJSX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.MNL4vJ7kJK perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-17422 namespace + cat /tmp/tmp.gQIc3MZJSX + rm /tmp/tmp.MNL4vJ7kJK /tmp/tmp.gQIc3MZJSX + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.8N9P8qvURC ++ mktemp + local LAST_ERR=/tmp/tmp.WnaEjVTUpv + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8N9P8qvURC No resources found + cat /tmp/tmp.WnaEjVTUpv + rm /tmp/tmp.8N9P8qvURC /tmp/tmp.WnaEjVTUpv + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.s2voXp7Qse ++ mktemp + local LAST_ERR=/tmp/tmp.FoUpJ7x9B5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.s2voXp7Qse No resources found + cat /tmp/tmp.FoUpJ7x9B5 + rm /tmp/tmp.s2voXp7Qse /tmp/tmp.FoUpJ7x9B5 + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ sed s/NAMESPACE// ++ awk '-F ' '{print $2}' + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were 
provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator + kubectl_bin get ns + xargs kubectl delete ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.gz6cROnLf2 + local LAST_OUT=/tmp/tmp.5Rfh3545hi ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.KXwcoQ6QgU + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.eK4cQehYqJ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5Rfh3545hi + cat /tmp/tmp.KXwcoQ6QgU + rm /tmp/tmp.5Rfh3545hi /tmp/tmp.KXwcoQ6QgU + return 0 namespace "users-17422" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.gz6cROnLf2 namespace "pxc-operator" deleted + cat /tmp/tmp.eK4cQehYqJ + rm /tmp/tmp.gz6cROnLf2 /tmp/tmp.eK4cQehYqJ + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'waiting for namespace/pxc-operator to be deleted' waiting for namespace/pxc-operator to be deleted+ set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.0KCxMXozy8 ++ mktemp + local LAST_ERR=/tmp/tmp.izz1AfdHQs + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0KCxMXozy8 namespace/pxc-operator created + cat /tmp/tmp.izz1AfdHQs + rm /tmp/tmp.0KCxMXozy8 /tmp/tmp.izz1AfdHQs + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.N8dBUo4Zx7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.wCa4ZWMxIk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.N8dBUo4Zx7 ++ cat /tmp/tmp.wCa4ZWMxIk ++ rm /tmp/tmp.N8dBUo4Zx7 /tmp/tmp.wCa4ZWMxIk ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2234-c49d4810-14-cluster8 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.Rsc9o04vDG ++ mktemp + local LAST_ERR=/tmp/tmp.u3za5QjVHP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2234-c49d4810-14-cluster8 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Rsc9o04vDG Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2234-c49d4810-14-cluster8" modified. 
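
[Annotation] Every kubectl call in this log runs through the suite's kubectl_bin wrapper, which is why each step is bracketed by mktemp, LAST_OUT/LAST_ERR, seq 0 2, cat, rm, and return bookkeeping. A minimal sketch of that pattern, reconstructed from the trace (the retry count, temp-file handling, and "sleep 0" pause match what the trace shows; the suite's actual function body is not in this log, so treat this as an inferred equivalent, not the verbatim source):

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0 i
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do            # up to three attempts, as in the trace
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ $exit_status != 0 ]; then
                sleep 0                    # the trace shows a zero-second pause between attempts
            else
                break
            fi
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2
        rm "$LAST_OUT" "$LAST_ERR"
        return $exit_status
    }
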
+ cat /tmp/tmp.u3za5QjVHP + rm /tmp/tmp.Rsc9o04vDG /tmp/tmp.u3za5QjVHP + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.e07o9bdwa9 ++ mktemp + local LAST_ERR=/tmp/tmp.JHGUnwHJtU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.e07o9bdwa9 customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.JHGUnwHJtU + rm /tmp/tmp.e07o9bdwa9 /tmp/tmp.JHGUnwHJtU + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.RfvvTAM1dy ++ mktemp + local LAST_ERR=/tmp/tmp.bSpDpOqByC + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RfvvTAM1dy clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.bSpDpOqByC + rm /tmp/tmp.RfvvTAM1dy /tmp/tmp.bSpDpOqByC + return 0 + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2234-c49d4810^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + kubectl_bin apply -f - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/deploy/cw-operator.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.SkqSUoxF6n ++ mktemp + local LAST_ERR=/tmp/tmp.7OG7r9G1fo + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SkqSUoxF6n deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.7OG7r9G1fo + rm /tmp/tmp.SkqSUoxF6n /tmp/tmp.7OG7r9G1fo + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.Urt4i0UCoh ++ mktemp + local LAST_ERR=/tmp/tmp.KBcdkTN4pe + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Urt4i0UCoh pod/percona-xtradb-cluster-operator-64f4d94dcc-tsb7w condition met + cat /tmp/tmp.KBcdkTN4pe + rm /tmp/tmp.Urt4i0UCoh /tmp/tmp.KBcdkTN4pe + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.7Jrpg4HaZN +++ mktemp ++ local LAST_ERR=/tmp/tmp.JZPqkQqxH1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7Jrpg4HaZN ++ cat /tmp/tmp.JZPqkQqxH1 ++ rm /tmp/tmp.7Jrpg4HaZN /tmp/tmp.JZPqkQqxH1 ++ return 0 + wait_pod percona-xtradb-cluster-operator-64f4d94dcc-tsb7w 480 pxc-operator + local pod=percona-xtradb-cluster-operator-64f4d94dcc-tsb7w + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-64f4d94dcc-tsb7w ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-64f4d94dcc-tsb7w condition met waiting for pod/percona-xtradb-cluster-operator-64f4d94dcc-tsb7w to become Ready.Ok + sleep 3 + create_namespace users-12977 + local namespace=users-12977 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + awk 
'{print$1}' + '[' -n '' ']' ++ mktemp + desc 'cleaned up old namespaces users-12977' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-12977 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-12977 + grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + local LAST_OUT=/tmp/tmp.EOuNA0klJJ ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.NFSrnPmMqc + local LAST_ERR=/tmp/tmp.IH6On7Ozva + local exit_status=0 ++ seq 0 2 ++ mktemp + for i in '$(seq 0 2)' + set +e + kubectl get ns + local LAST_ERR=/tmp/tmp.UrL1dcRNXX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-12977 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-12977 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.EOuNA0klJJ + cat /tmp/tmp.IH6On7Ozva + rm /tmp/tmp.EOuNA0klJJ /tmp/tmp.IH6On7Ozva + return 0 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-12977 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.NFSrnPmMqc + cat /tmp/tmp.UrL1dcRNXX Error from server (NotFound): namespaces "users-12977" not found + rm /tmp/tmp.NFSrnPmMqc /tmp/tmp.UrL1dcRNXX + return 1 + : + wait_for_delete namespace/users-12977 + local res=namespace/users-12977 + echo -n 'waiting for namespace/users-12977 to be deleted' waiting for namespace/users-12977 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "users-12977" not found + desc 'create namespace users-12977' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-12977 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-12977 ++ mktemp + local LAST_OUT=/tmp/tmp.8ClZ4k2UOn ++ mktemp + local LAST_ERR=/tmp/tmp.vi0XYCA0fs + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-12977 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8ClZ4k2UOn namespace/users-12977 created + cat /tmp/tmp.vi0XYCA0fs + rm /tmp/tmp.8ClZ4k2UOn /tmp/tmp.vi0XYCA0fs + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.P7i4AKTfsT +++ mktemp ++ local LAST_ERR=/tmp/tmp.6aK0G7eD40 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.P7i4AKTfsT ++ cat /tmp/tmp.6aK0G7eD40 ++ rm /tmp/tmp.P7i4AKTfsT /tmp/tmp.6aK0G7eD40 ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2234-c49d4810-14-cluster8 --namespace=users-12977 ++ mktemp + local LAST_OUT=/tmp/tmp.k3t2dmAas8 ++ mktemp + local LAST_ERR=/tmp/tmp.hJbhEALCCy + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2234-c49d4810-14-cluster8 --namespace=users-12977 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.k3t2dmAas8 Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2234-c49d4810-14-cluster8" modified. 
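
[Annotation] The create_namespace step above deliberately tolerates the NotFound failure: the wrapper exhausts its retries, returns 1, and the caller discards the status with the ':' no-op before polling for the deletion to settle and recreating the namespace. Condensed, the sequence is roughly the following (a sketch assuming stock kubectl semantics, not the suite's verbatim code):

    kubectl delete namespace users-12977 || :      # NotFound is fine on a fresh cluster
    while kubectl get namespace users-12977 >/dev/null 2>&1; do
        sleep 1                                    # wait_for_delete polls until NotFound
    done
    kubectl create namespace users-12977
    kubectl config set-context "$(kubectl config current-context)" --namespace=users-12977
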
+ cat /tmp/tmp.hJbhEALCCy + rm /tmp/tmp.k3t2dmAas8 /tmp/tmp.hJbhEALCCy + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.vCQiaLqfxp ++ mktemp + local LAST_ERR=/tmp/tmp.Ei0oabP6oU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.vCQiaLqfxp secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.Ei0oabP6oU + rm /tmp/tmp.vCQiaLqfxp /tmp/tmp.Ei0oabP6oU + return 0 + desc 'create PXC cluster with 1-password secret' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster with 1-password secret ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/some-name.yml '' '' /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/secrets_one_pass.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/secrets_one_pass.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/secrets_one_pass.yml ++ mktemp + local LAST_OUT=/tmp/tmp.EKfZE5iDSS ++ mktemp + local LAST_ERR=/tmp/tmp.AHG1ltJgJW + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/secrets_one_pass.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.EKfZE5iDSS secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.AHG1ltJgJW + rm /tmp/tmp.EKfZE5iDSS /tmp/tmp.AHG1ltJgJW + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/client.yml + local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/client.yml + local pvc_name= + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/client.yml '' + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/client.yml + kubectl_bin apply -f - + local pvc_name= + cat 
/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/client.yml ++ mktemp + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + local LAST_OUT=/tmp/tmp.0OBfN8LawV + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2234-c49d4810#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ mktemp + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-12977~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + local LAST_ERR=/tmp/tmp.0VyGYnAJsw + local exit_status=0 ++ seq 0 2 + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0OBfN8LawV deployment.apps/pxc-client created + cat /tmp/tmp.0VyGYnAJsw + rm /tmp/tmp.0OBfN8LawV /tmp/tmp.0VyGYnAJsw + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/some-name.yml + local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/some-name.yml + local pvc_name= + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/some-name.yml '' + kubectl_bin apply -f - + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/some-name.yml + local pvc_name= ++ mktemp + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/conf/some-name.yml + local LAST_OUT=/tmp/tmp.qipYhg7WZa + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-12977~ + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' + local LAST_ERR=/tmp/tmp.EbGMjqY5Or + local exit_status=0 + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' ++ seq 0 2 + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + /usr/bin/sed -e 's#image:.*-init$#image: 
perconalab/percona-xtradb-cluster-operator:PR-2234-c49d4810#' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qipYhg7WZa perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.EbGMjqY5Or + rm /tmp/tmp.qipYhg7WZa /tmp/tmp.EbGMjqY5Or + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.J9HTmnkBI2 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.iKfFIN0v7v +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.J9HTmnkBI2 +++ cat /tmp/tmp.iKfFIN0v7v +++ rm /tmp/tmp.J9HTmnkBI2 /tmp/tmp.iKfFIN0v7v +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.TicrCJyHWQ ++++ mktemp +++ local LAST_ERR=/tmp/tmp.V6ccaAYLZj +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.TicrCJyHWQ +++ cat /tmp/tmp.V6ccaAYLZj +++ rm /tmp/tmp.TicrCJyHWQ /tmp/tmp.V6ccaAYLZj +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-12977 ++ mktemp + local LAST_OUT=/tmp/tmp.3IHaykjJDa ++ mktemp + local LAST_ERR=/tmp/tmp.KhN6sbD1Gm + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-12977 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-12977 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-12977 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.3IHaykjJDa + cat /tmp/tmp.KhN6sbD1Gm error: no matching resources found + rm /tmp/tmp.3IHaykjJDa /tmp/tmp.KhN6sbD1Gm + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 
's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.vpyKrMLWSV +++ mktemp ++ local LAST_ERR=/tmp/tmp.W45K7HDtx8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vpyKrMLWSV ++ cat /tmp/tmp.W45K7HDtx8 ++ rm /tmp/tmp.vpyKrMLWSV /tmp/tmp.W45K7HDtx8 ++ return 0 + local 'root_pass=#2Z(2Z0lm,$nR8ARH?' 
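
[Annotation] getSecretData, used here to recover the root password, is a Go-template lookup on the Secret followed by base64 decoding; the trace shows the exact kubectl form. An equivalent standalone helper (argument names taken from the trace; the function body is inferred):

    getSecretData() {
        local secretName=$1 dataKey=$2
        # Secret .data values are stored base64-encoded, hence the decode
        kubectl get "secrets/$secretName" --template="{{.data.$dataKey}}" | base64 --decode
    }

    root_pass=$(getSecretData my-cluster-secrets root)
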
+ desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ grep -E -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nb8RSijpY4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.vVjA0WJoak ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nb8RSijpY4 ++ cat /tmp/tmp.vVjA0WJoak Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.nb8RSijpY4 /tmp/tmp.vVjA0WJoak ++ return 0 + [[ -n '' ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''#2Z(2Z0lm,$nR8ARH?'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -p'\''#2Z(2Z0lm,$nR8ARH?'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.r1f3BDsoyf +++ mktemp ++ local LAST_ERR=/tmp/tmp.J23YRuQruM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.r1f3BDsoyf ++ cat /tmp/tmp.J23YRuQruM ++ rm /tmp/tmp.r1f3BDsoyf /tmp/tmp.J23YRuQruM ++ return 0 + client_pod=pxc-client-857d976497-6qfkm + wait_pod pxc-client-857d976497-6qfkm + local pod=pxc-client-857d976497-6qfkm + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo pxc-client-857d976497-6qfkm ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-6qfkm condition met waiting for pod/pxc-client-857d976497-6qfkm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''#2Z(2Z0lm,$nR8ARH?'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -p'\''#2Z(2Z0lm,$nR8ARH?'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.j0wkLgPBMx +++ mktemp ++ local LAST_ERR=/tmp/tmp.4uBUlZec3n ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.j0wkLgPBMx ++ cat /tmp/tmp.4uBUlZec3n ++ rm /tmp/tmp.j0wkLgPBMx /tmp/tmp.4uBUlZec3n ++ return 0 + client_pod=pxc-client-857d976497-6qfkm + wait_pod pxc-client-857d976497-6qfkm + local pod=pxc-client-857d976497-6qfkm + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' ++ echo pxc-client-857d976497-6qfkm + local container= + set +o xtrace pod/pxc-client-857d976497-6qfkm condition met waiting for 
pod/pxc-client-857d976497-6qfkm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''#2Z(2Z0lm,$nR8ARH?'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''#2Z(2Z0lm,$nR8ARH?'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''#2Z(2Z0lm,$nR8ARH?'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''#2Z(2Z0lm,$nR8ARH?'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.E1pEoGNzFu +++ mktemp ++ local LAST_ERR=/tmp/tmp.C2VO1ercyi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.E1pEoGNzFu ++ cat /tmp/tmp.C2VO1ercyi ++ rm /tmp/tmp.E1pEoGNzFu /tmp/tmp.C2VO1ercyi ++ return 0 + client_pod=pxc-client-857d976497-6qfkm + wait_pod pxc-client-857d976497-6qfkm + local pod=pxc-client-857d976497-6qfkm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-6qfkm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-6qfkm condition met waiting for pod/pxc-client-857d976497-6qfkm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.O4Ec8syMdJ/select-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-1.sql /tmp/tmp.O4Ec8syMdJ/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''#2Z(2Z0lm,$nR8ARH?'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''#2Z(2Z0lm,$nR8ARH?'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''#2Z(2Z0lm,$nR8ARH?'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''#2Z(2Z0lm,$nR8ARH?'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XaylxGNyKp +++ mktemp ++ local LAST_ERR=/tmp/tmp.Def8w80btz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XaylxGNyKp ++ cat /tmp/tmp.Def8w80btz ++ rm /tmp/tmp.XaylxGNyKp /tmp/tmp.Def8w80btz ++ return 0 + client_pod=pxc-client-857d976497-6qfkm + wait_pod pxc-client-857d976497-6qfkm + local pod=pxc-client-857d976497-6qfkm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-6qfkm ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-857d976497-6qfkm condition met waiting for pod/pxc-client-857d976497-6qfkm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.O4Ec8syMdJ/select-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-1.sql /tmp/tmp.O4Ec8syMdJ/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''#2Z(2Z0lm,$nR8ARH?'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''#2Z(2Z0lm,$nR8ARH?'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''#2Z(2Z0lm,$nR8ARH?'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''#2Z(2Z0lm,$nR8ARH?'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ys6y1v3Cxz +++ mktemp ++ local LAST_ERR=/tmp/tmp.hhCxaZPg9z ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ys6y1v3Cxz ++ cat /tmp/tmp.hhCxaZPg9z ++ rm /tmp/tmp.ys6y1v3Cxz /tmp/tmp.hhCxaZPg9z ++ return 0 + client_pod=pxc-client-857d976497-6qfkm + wait_pod pxc-client-857d976497-6qfkm + local pod=pxc-client-857d976497-6qfkm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-6qfkm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-6qfkm condition met waiting for pod/pxc-client-857d976497-6qfkm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.O4Ec8syMdJ/select-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-1.sql /tmp/tmp.O4Ec8syMdJ/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' +++ mktemp ++ grep -E -o 'early-plugin-load=keyring_\w+.so' ++ local LAST_OUT=/tmp/tmp.wVlvGEZ0DF +++ mktemp ++ local LAST_ERR=/tmp/tmp.SbpLpdxwZ3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wVlvGEZ0DF ++ cat /tmp/tmp.SbpLpdxwZ3 Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.wVlvGEZ0DF /tmp/tmp.SbpLpdxwZ3 ++ return 0 + '[' '' ']' + desc 'test missing passwords were created and present in internal secrets' + set +o xtrace ----------------------------------------------------------------------------------- test missing passwords were created and present in internal secrets ----------------------------------------------------------------------------------- + empty_pwds=() + wrong_pwds=() + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking root' Checking root ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.I8xQUOpNZJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.Tw90VP5BgS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.I8xQUOpNZJ ++ cat /tmp/tmp.Tw90VP5BgS ++ rm /tmp/tmp.I8xQUOpNZJ /tmp/tmp.Tw90VP5BgS ++ return 0 + secret_pass='#2Z(2Z0lm,$nR8ARH?' ++ getSecretData internal-some-name root ++ local secretName=internal-some-name ++ local dataKey=root ++ kubectl_bin get secrets/internal-some-name '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.nL1h7IMVCE +++ mktemp ++ local LAST_ERR=/tmp/tmp.ud2ogMYO31 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nL1h7IMVCE ++ cat /tmp/tmp.ud2ogMYO31 ++ rm /tmp/tmp.nL1h7IMVCE /tmp/tmp.ud2ogMYO31 ++ return 0 + int_secret_pass='#2Z(2Z0lm,$nR8ARH?' + [[ -z #2Z(2Z0lm,$nR8ARH? ]] + [[ #2Z(2Z0lm,$nR8ARH? != \#\2\Z\(\2\Z\0\l\m\,\$\n\R\8\A\R\H\? 
]] + [[ root != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ root ]] + [[ '' =~ root ]] + echo 'Running compare for root' Running compare for root + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''#2Z(2Z0lm,$nR8ARH?'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''#2Z(2Z0lm,$nR8ARH?'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''#2Z(2Z0lm,$nR8ARH?'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''#2Z(2Z0lm,$nR8ARH?'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VCaP5vhEHB +++ mktemp ++ local LAST_ERR=/tmp/tmp.49cVN0ad24 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VCaP5vhEHB ++ cat /tmp/tmp.49cVN0ad24 ++ rm /tmp/tmp.VCaP5vhEHB /tmp/tmp.49cVN0ad24 ++ return 0 + client_pod=pxc-client-857d976497-6qfkm + wait_pod pxc-client-857d976497-6qfkm + local pod=pxc-client-857d976497-6qfkm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-6qfkm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-6qfkm condition met waiting for pod/pxc-client-857d976497-6qfkm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.O4Ec8syMdJ/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql /tmp/tmp.O4Ec8syMdJ/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking xtrabackup' Checking xtrabackup ++ getSecretData my-cluster-secrets xtrabackup ++ local secretName=my-cluster-secrets ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.BXt8P6KPPu +++ mktemp ++ local LAST_ERR=/tmp/tmp.XrMIjaEkg8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BXt8P6KPPu ++ cat /tmp/tmp.XrMIjaEkg8 ++ rm /tmp/tmp.BXt8P6KPPu /tmp/tmp.XrMIjaEkg8 ++ return 0 + secret_pass='M]mT+82tZs[iIab9' ++ getSecretData internal-some-name xtrabackup ++ local secretName=internal-some-name ++ local dataKey=xtrabackup ++ base64 --decode ++ kubectl_bin get secrets/internal-some-name '--template={{.data.xtrabackup}}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DHIkKPqLKk +++ mktemp ++ local LAST_ERR=/tmp/tmp.Wm9dRa7tvY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DHIkKPqLKk ++ cat /tmp/tmp.Wm9dRa7tvY ++ rm /tmp/tmp.DHIkKPqLKk /tmp/tmp.Wm9dRa7tvY ++ return 0 + int_secret_pass='M]mT+82tZs[iIab9' + [[ -z M]mT+82tZs[iIab9 ]] + [[ M]mT+82tZs[iIab9 != \M\]\m\T\+\8\2\t\Z\s\[\i\I\a\b\9 ]] + [[ xtrabackup != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ xtrabackup ]] + [[ '' =~ xtrabackup ]] + echo 'Running compare for xtrabackup' Running compare for xtrabackup + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''M]mT+82tZs[iIab9'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''M]mT+82tZs[iIab9'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''M]mT+82tZs[iIab9'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''M]mT+82tZs[iIab9'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.msT7zdMjxO +++ mktemp ++ local LAST_ERR=/tmp/tmp.4EM8UtY1kp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.msT7zdMjxO ++ cat /tmp/tmp.4EM8UtY1kp ++ rm /tmp/tmp.msT7zdMjxO /tmp/tmp.4EM8UtY1kp ++ return 0 + client_pod=pxc-client-857d976497-6qfkm + wait_pod pxc-client-857d976497-6qfkm + local pod=pxc-client-857d976497-6qfkm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-6qfkm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E 
'^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-6qfkm condition met waiting for pod/pxc-client-857d976497-6qfkm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.O4Ec8syMdJ/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql /tmp/tmp.O4Ec8syMdJ/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking monitor' Checking monitor ++ getSecretData my-cluster-secrets monitor ++ local secretName=my-cluster-secrets ++ local dataKey=monitor ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.8nPCTN4qL3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.uYhvxNVrRs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8nPCTN4qL3 ++ cat /tmp/tmp.uYhvxNVrRs ++ rm /tmp/tmp.8nPCTN4qL3 /tmp/tmp.uYhvxNVrRs ++ return 0 + secret_pass=monitor_password ++ getSecretData internal-some-name monitor ++ local secretName=internal-some-name ++ local dataKey=monitor ++ kubectl_bin get secrets/internal-some-name '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.qiu0odXCWv +++ mktemp ++ local LAST_ERR=/tmp/tmp.ODcUEdUSnD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qiu0odXCWv ++ cat /tmp/tmp.ODcUEdUSnD ++ rm /tmp/tmp.qiu0odXCWv /tmp/tmp.ODcUEdUSnD ++ return 0 + int_secret_pass=monitor_password + [[ -z monitor_password ]] + [[ monitor_password != \m\o\n\i\t\o\r\_\p\a\s\s\w\o\r\d ]] + [[ monitor != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ monitor ]] + [[ '' =~ monitor ]] + echo 'Running compare for monitor' Running compare for monitor + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vnK4AX46RE +++ mktemp ++ local LAST_ERR=/tmp/tmp.t1xT4wd28a ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vnK4AX46RE ++ cat /tmp/tmp.t1xT4wd28a ++ rm /tmp/tmp.vnK4AX46RE /tmp/tmp.t1xT4wd28a ++ return 0 + client_pod=pxc-client-857d976497-6qfkm + wait_pod 
pxc-client-857d976497-6qfkm + local pod=pxc-client-857d976497-6qfkm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-6qfkm ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-857d976497-6qfkm condition met waiting for pod/pxc-client-857d976497-6qfkm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.O4Ec8syMdJ/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql /tmp/tmp.O4Ec8syMdJ/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking proxyadmin' Checking proxyadmin ++ getSecretData my-cluster-secrets proxyadmin ++ local secretName=my-cluster-secrets ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.SEJaVyUVLl +++ mktemp ++ local LAST_ERR=/tmp/tmp.k4xXgq66Zw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SEJaVyUVLl ++ cat /tmp/tmp.k4xXgq66Zw ++ rm /tmp/tmp.SEJaVyUVLl /tmp/tmp.k4xXgq66Zw ++ return 0 + secret_pass='2(YeUTxYgv}K}B+Ct' ++ getSecretData internal-some-name proxyadmin ++ local secretName=internal-some-name ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.5uPDUZPd7s +++ mktemp ++ local LAST_ERR=/tmp/tmp.5KTa1uKrVM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5uPDUZPd7s ++ cat /tmp/tmp.5KTa1uKrVM ++ rm /tmp/tmp.5uPDUZPd7s /tmp/tmp.5KTa1uKrVM ++ return 0 + int_secret_pass='2(YeUTxYgv}K}B+Ct' + [[ -z 2(YeUTxYgv}K}B+Ct ]] + [[ 2(YeUTxYgv}K}B+Ct != \2\(\Y\e\U\T\x\Y\g\v\}\K\}\B\+\C\t ]] + [[ proxyadmin != \p\r\o\x\y\a\d\m\i\n ]] + [[ proxyadmin == \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ proxyadmin ]] + [[ '' =~ proxyadmin ]] + echo 'Running compare for proxyadmin' Running compare for proxyadmin + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''2(YeUTxYgv}K}B+Ct'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''2(YeUTxYgv}K}B+Ct'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''2(YeUTxYgv}K}B+Ct'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''2(YeUTxYgv}K}B+Ct'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.O4Ec8syMdJ/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-2.sql /tmp/tmp.O4Ec8syMdJ/select-2.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking operator' Checking operator ++ getSecretData my-cluster-secrets operator ++ local secretName=my-cluster-secrets ++ local dataKey=operator ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZGgtTG2sZ0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.xylnRvTa0g ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZGgtTG2sZ0 ++ cat /tmp/tmp.xylnRvTa0g ++ rm /tmp/tmp.ZGgtTG2sZ0 /tmp/tmp.xylnRvTa0g ++ return 0 + secret_pass='E-MxQy)2&f7_BK[2q' ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.0IZuO0latk +++ mktemp ++ local LAST_ERR=/tmp/tmp.P8XfGnZ7ns ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0IZuO0latk ++ cat /tmp/tmp.P8XfGnZ7ns ++ rm /tmp/tmp.0IZuO0latk /tmp/tmp.P8XfGnZ7ns ++ return 0 + int_secret_pass='E-MxQy)2&f7_BK[2q' + [[ -z E-MxQy)2&f7_BK[2q ]] + [[ E-MxQy)2&f7_BK[2q != \E\-\M\x\Q\y\)\2\&\f\7\_\B\K\[\2\q ]] + [[ operator != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ operator ]] + [[ '' =~ operator ]] + echo 'Running compare for operator' Running compare for operator + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''E-MxQy)2&f7_BK[2q'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''E-MxQy)2&f7_BK[2q'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''E-MxQy)2&f7_BK[2q'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''E-MxQy)2&f7_BK[2q'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1y5ntbjdmO +++ mktemp ++ local LAST_ERR=/tmp/tmp.QBiNyyQDS0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1y5ntbjdmO ++ cat /tmp/tmp.QBiNyyQDS0 ++ rm /tmp/tmp.1y5ntbjdmO /tmp/tmp.QBiNyyQDS0 ++ return 0 + client_pod=pxc-client-857d976497-6qfkm + wait_pod pxc-client-857d976497-6qfkm + local pod=pxc-client-857d976497-6qfkm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-6qfkm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + 
set +o xtrace pod/pxc-client-857d976497-6qfkm condition met waiting for pod/pxc-client-857d976497-6qfkm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.O4Ec8syMdJ/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql /tmp/tmp.O4Ec8syMdJ/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking replication' Checking replication ++ getSecretData my-cluster-secrets replication ++ local secretName=my-cluster-secrets ++ local dataKey=replication ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.VaMZxXUNO7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Koh0b1f9p6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VaMZxXUNO7 ++ cat /tmp/tmp.Koh0b1f9p6 ++ rm /tmp/tmp.VaMZxXUNO7 /tmp/tmp.Koh0b1f9p6 ++ return 0 + secret_pass='yxpB%<4fUi-%ey~4qMv' ++ getSecretData internal-some-name replication ++ local secretName=internal-some-name ++ local dataKey=replication ++ kubectl_bin get secrets/internal-some-name '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.67kpycqP4I +++ mktemp ++ local LAST_ERR=/tmp/tmp.0iNygCvFmZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.67kpycqP4I ++ cat /tmp/tmp.0iNygCvFmZ ++ rm /tmp/tmp.67kpycqP4I /tmp/tmp.0iNygCvFmZ ++ return 0 + int_secret_pass='yxpB%<4fUi-%ey~4qMv' + [[ -z yxpB%<4fUi-%ey~4qMv ]] + [[ yxpB%<4fUi-%ey~4qMv != \y\x\p\B\%\<\4\f\U\i\-\%\e\y\~\4\q\M\v ]] + [[ replication != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ replication ]] + [[ '' =~ replication ]] + echo 'Running compare for replication' Running compare for replication + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''yxpB%<4fUi-%ey~4qMv'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''yxpB%<4fUi-%ey~4qMv'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''yxpB%<4fUi-%ey~4qMv'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''yxpB%<4fUi-%ey~4qMv'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hRe68n2VYM +++ mktemp ++ local LAST_ERR=/tmp/tmp.P6Qws8eS9Z ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hRe68n2VYM ++ cat /tmp/tmp.P6Qws8eS9Z ++ rm /tmp/tmp.hRe68n2VYM /tmp/tmp.P6Qws8eS9Z ++ return 0 + 
client_pod=pxc-client-857d976497-6qfkm + wait_pod pxc-client-857d976497-6qfkm + local pod=pxc-client-857d976497-6qfkm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-6qfkm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-6qfkm condition met waiting for pod/pxc-client-857d976497-6qfkm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.O4Ec8syMdJ/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql /tmp/tmp.O4Ec8syMdJ/select-4.sql + [[ -n '' ]] + [[ -n '' ]] + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.2v9UUJHkjE ++ mktemp + local LAST_ERR=/tmp/tmp.43OK0Sx4sG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2v9UUJHkjE secret/my-cluster-secrets patched + cat /tmp/tmp.43OK0Sx4sG + rm /tmp/tmp.2v9UUJHkjE /tmp/tmp.43OK0Sx4sG + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.c9VdNcNBja +++ mktemp ++ local LAST_ERR=/tmp/tmp.hUh989Dl9S ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.c9VdNcNBja ++ cat /tmp/tmp.hUh989Dl9S ++ rm /tmp/tmp.c9VdNcNBja /tmp/tmp.hUh989Dl9S ++ return 0 + client_pod=pxc-client-857d976497-6qfkm + wait_pod pxc-client-857d976497-6qfkm + local pod=pxc-client-857d976497-6qfkm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-6qfkm ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-857d976497-6qfkm condition met waiting for pod/pxc-client-857d976497-6qfkm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.O4Ec8syMdJ/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql /tmp/tmp.O4Ec8syMdJ/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.GOimwnLRHO ++ mktemp + local LAST_ERR=/tmp/tmp.rqpmJotFkq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.GOimwnLRHO perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.rqpmJotFkq + rm /tmp/tmp.GOimwnLRHO /tmp/tmp.rqpmJotFkq + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J9vuerjA02 +++ mktemp ++ local LAST_ERR=/tmp/tmp.3ardPWI4fQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.J9vuerjA02 ++ cat /tmp/tmp.3ardPWI4fQ ++ rm /tmp/tmp.J9vuerjA02 /tmp/tmp.3ardPWI4fQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
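The dotted output above and below comes from the harness's wait_cluster_consistency helper. A minimal sketch of it, reconstructed from the locals and checks visible in this trace (the real helper routes every kubectl call through the kubectl_bin retry wrapper and dispatches between HAProxy and ProxySQL via get_proxy_engine; both are folded into direct calls here):

    wait_cluster_consistency() {
        local cluster_name=$1 cluster_size=$2 proxy_size=$3
        local i=0 max=300
        sleep 7        # give the operator a moment to react to the change
        echo -n "waiting for pxc/${cluster_name} to be ready"
        until [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.state}') == "ready" ]]; do
            echo -n .
            sleep 5
            [[ $i -ge $max ]] && return 1    # ~25 minutes at 5 s per attempt
            let i+=1
        done
        # once the state is "ready", both tiers must report full replica counts
        [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.pxc.ready}') == "$cluster_size" ]]
        [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.proxysql.ready}') == "$proxy_size" ]]
        echo
    }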
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lQYGYUKgx0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.re3O1mxsTv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lQYGYUKgx0 ++ cat /tmp/tmp.re3O1mxsTv ++ rm /tmp/tmp.lQYGYUKgx0 /tmp/tmp.re3O1mxsTv ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SKYcCMrJay +++ mktemp ++ local LAST_ERR=/tmp/tmp.gcFewuvdQr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SKYcCMrJay ++ cat /tmp/tmp.gcFewuvdQr ++ rm /tmp/tmp.SKYcCMrJay /tmp/tmp.gcFewuvdQr ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.SgMVT6DHeD ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.FoFHTN7uNF +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.SgMVT6DHeD +++++ cat /tmp/tmp.FoFHTN7uNF +++++ rm /tmp/tmp.SgMVT6DHeD /tmp/tmp.FoFHTN7uNF +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.AuvUfM9bRu ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Sf14Dy88NQ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.AuvUfM9bRu +++++ cat /tmp/tmp.Sf14Dy88NQ +++++ rm /tmp/tmp.AuvUfM9bRu /tmp/tmp.Sf14Dy88NQ +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qnEruV2gXV +++ mktemp ++ local LAST_ERR=/tmp/tmp.8VMEu35BPq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qnEruV2gXV ++ cat /tmp/tmp.8VMEu35BPq ++ rm /tmp/tmp.qnEruV2gXV /tmp/tmp.8VMEu35BPq ++ return 0 + [[ 3 == \3 ]] + echo + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.T3Jzcs0EXf ++ mktemp + local LAST_ERR=/tmp/tmp.Fsg8SmmuO3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.T3Jzcs0EXf secret/my-cluster-secrets patched + cat /tmp/tmp.Fsg8SmmuO3 + rm /tmp/tmp.T3Jzcs0EXf /tmp/tmp.Fsg8SmmuO3 + return 0 + 
sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PVeImu1Xpa +++ mktemp ++ local LAST_ERR=/tmp/tmp.rnW1bEB4MH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PVeImu1Xpa ++ cat /tmp/tmp.rnW1bEB4MH ++ rm /tmp/tmp.PVeImu1Xpa /tmp/tmp.rnW1bEB4MH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fVcRKHgIF2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.LLMH8tKK89 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fVcRKHgIF2 ++ cat /tmp/tmp.LLMH8tKK89 ++ rm /tmp/tmp.fVcRKHgIF2 /tmp/tmp.LLMH8tKK89 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gCbj3dh2Ea +++ mktemp ++ local LAST_ERR=/tmp/tmp.lRmKMWxDA8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gCbj3dh2Ea ++ cat /tmp/tmp.lRmKMWxDA8 ++ rm /tmp/tmp.gCbj3dh2Ea /tmp/tmp.lRmKMWxDA8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2Twch1hSUu +++ mktemp ++ local LAST_ERR=/tmp/tmp.E9Ex2uVuz2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2Twch1hSUu ++ cat /tmp/tmp.E9Ex2uVuz2 ++ rm /tmp/tmp.2Twch1hSUu /tmp/tmp.E9Ex2uVuz2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.x5M5USbVu7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.LeOTJ9mgyy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.x5M5USbVu7 ++ cat /tmp/tmp.LeOTJ9mgyy ++ rm /tmp/tmp.x5M5USbVu7 /tmp/tmp.LeOTJ9mgyy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
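Every kubectl invocation in this log runs through the kubectl_bin wrapper, which is what produces the recurring mktemp / LAST_OUT / LAST_ERR / seq 0 2 scaffolding. A sketch, assuming stdout and stderr are redirected into the two temp files and that failed attempts back off briefly (xtrace does not print redirections, and only the success path appears in this run):

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0 i
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do                 # up to three attempts
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            [ $exit_status != 0 ] || break      # matches the '[' 0 '!=' 0 ']' + break pattern above
            sleep 1                             # assumed back-off between attempts
        done
        cat "$LAST_OUT"                         # replay captured output for the caller
        cat "$LAST_ERR" >&2                     # stderr target assumed
        rm "$LAST_OUT" "$LAST_ERR"
        return $exit_status
    }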
.+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RMNQn24mft +++ mktemp ++ local LAST_ERR=/tmp/tmp.Il9vqaMrqy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RMNQn24mft ++ cat /tmp/tmp.Il9vqaMrqy ++ rm /tmp/tmp.RMNQn24mft /tmp/tmp.Il9vqaMrqy ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kGQeDijlln +++ mktemp ++ local LAST_ERR=/tmp/tmp.WmH9aHWiMb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kGQeDijlln ++ cat /tmp/tmp.WmH9aHWiMb ++ rm /tmp/tmp.kGQeDijlln /tmp/tmp.WmH9aHWiMb ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.3ChEjvOxln ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.29FAUU6s0W +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.3ChEjvOxln +++++ cat /tmp/tmp.29FAUU6s0W +++++ rm /tmp/tmp.3ChEjvOxln /tmp/tmp.29FAUU6s0W +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.SJoL5jGRtv ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.efPN6dTcZ1 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.SJoL5jGRtv +++++ cat /tmp/tmp.efPN6dTcZ1 +++++ rm /tmp/tmp.SJoL5jGRtv /tmp/tmp.efPN6dTcZ1 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lCOpJZwcyi +++ mktemp ++ local LAST_ERR=/tmp/tmp.O3MG52ufdn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lCOpJZwcyi ++ cat /tmp/tmp.O3MG52ufdn ++ rm /tmp/tmp.lCOpJZwcyi /tmp/tmp.O3MG52ufdn ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' 
some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.O4Ec8syMdJ/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-2.sql /tmp/tmp.O4Ec8syMdJ/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.O4Ec8syMdJ/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-2.sql /tmp/tmp.O4Ec8syMdJ/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.O4Ec8syMdJ/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-2.sql /tmp/tmp.O4Ec8syMdJ/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.on0Hs1VqTs ++ mktemp + local LAST_ERR=/tmp/tmp.1OwDYur8eC + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.on0Hs1VqTs perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.1OwDYur8eC + rm /tmp/tmp.on0Hs1VqTs /tmp/tmp.1OwDYur8eC + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.V4rxIbAZg1 ++ mktemp + local LAST_ERR=/tmp/tmp.paR68LUkih + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.V4rxIbAZg1 secret/my-cluster-secrets patched + cat /tmp/tmp.paR68LUkih + rm /tmp/tmp.V4rxIbAZg1 /tmp/tmp.paR68LUkih + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8Wg6UIhRAI +++ mktemp ++ local LAST_ERR=/tmp/tmp.VI0LfsI9Of ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8Wg6UIhRAI ++ cat /tmp/tmp.VI0LfsI9Of ++ rm /tmp/tmp.8Wg6UIhRAI /tmp/tmp.VI0LfsI9Of ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xu3HREPMl2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.zTk3XlsAHq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xu3HREPMl2 ++ cat /tmp/tmp.zTk3XlsAHq ++ rm /tmp/tmp.xu3HREPMl2 /tmp/tmp.zTk3XlsAHq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
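patch_secret, just used to rotate the xtrabackup credential, is a thin wrapper over kubectl patch; the value argument arrives already base64-encoded. A sketch matching the commands in the trace, plus the decoding of the payload used throughout this test:

    patch_secret() {
        local secret=$1 key=$2 value=$3    # value must be base64-encoded
        kubectl patch secret "$secret" -p="{\"data\":{\"$key\": \"$value\"}}"
    }

    printf '%s' 'test-password' | base64   # -> dGVzdC1wYXNzd29yZA==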
.+ sleep 5 [iterations 1-23 elided: every 5 seconds the loop re-runs kubectl get pxc some-name -o 'jsonpath={.status.state}' through kubectl_bin with a fresh pair of mktemp capture files, and the state stays "initializing" each time] + echo -n .
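The quadruple-plus nesting around each readiness check below comes from get_proxy_engine calling get_proxy, which inspects the cluster spec to decide whether HAProxy or ProxySQL fronts it (in this run spec.haproxy.enabled is empty and spec.proxysql.enabled is true). A sketch; the haproxy branch's return value is an assumption, since only the proxysql path executes here:

    get_proxy() {
        local target_cluster=$1
        if [[ $(kubectl get pxc "$target_cluster" -o 'jsonpath={.spec.haproxy.enabled}') == "true" ]]; then
            echo "${target_cluster}-haproxy"       # assumed; branch not taken in this log
        elif [[ $(kubectl get pxc "$target_cluster" -o 'jsonpath={.spec.proxysql.enabled}') == "true" ]]; then
            echo "${target_cluster}-proxysql"
        fi
    }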
.+ sleep 5 + [[ 24 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mi3ioeoTTX +++ mktemp ++ local LAST_ERR=/tmp/tmp.bnkklGKTq9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mi3ioeoTTX ++ cat /tmp/tmp.bnkklGKTq9 ++ rm /tmp/tmp.mi3ioeoTTX /tmp/tmp.bnkklGKTq9 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.84TU0KIP2c +++ mktemp ++ local LAST_ERR=/tmp/tmp.RH5ABEapB9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.84TU0KIP2c ++ cat /tmp/tmp.RH5ABEapB9 ++ rm /tmp/tmp.84TU0KIP2c /tmp/tmp.RH5ABEapB9 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.2RnAknzx7O ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.14HmREWnUQ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.2RnAknzx7O +++++ cat /tmp/tmp.14HmREWnUQ +++++ rm /tmp/tmp.2RnAknzx7O /tmp/tmp.14HmREWnUQ +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.lAujoTsILh ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.2ChVcamF0D +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.lAujoTsILh +++++ cat /tmp/tmp.2ChVcamF0D +++++ rm /tmp/tmp.lAujoTsILh /tmp/tmp.2ChVcamF0D +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4VaKMDXNB9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.AA5FT5wJGI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4VaKMDXNB9 ++ cat /tmp/tmp.AA5FT5wJGI ++ rm /tmp/tmp.4VaKMDXNB9 /tmp/tmp.AA5FT5wJGI ++ return 0 + [[ 2 == \2 ]] + echo + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 
'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' -s /tmp/tmp.O4Ec8syMdJ/select-3.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-3.sql /tmp/tmp.O4Ec8syMdJ/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.mPJwFAyRBB ++ mktemp + local LAST_ERR=/tmp/tmp.9Oz57XYWjo + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.mPJwFAyRBB secret/my-cluster-secrets patched + cat /tmp/tmp.9Oz57XYWjo + rm /tmp/tmp.mPJwFAyRBB /tmp/tmp.9Oz57XYWjo + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.OD8D0c0P0q +++ mktemp ++ local LAST_ERR=/tmp/tmp.VnCzzVLbzX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OD8D0c0P0q ++ cat /tmp/tmp.VnCzzVLbzX ++ rm /tmp/tmp.OD8D0c0P0q /tmp/tmp.VnCzzVLbzX ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + echo 'Skipping dual password feature doesn'\''t work for 5.7. PXC 5.7 doesn'\''t support it!' Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it! + return + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KjxkTTOu4I +++ mktemp ++ local LAST_ERR=/tmp/tmp.VOUGxiy3Vn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KjxkTTOu4I ++ cat /tmp/tmp.VOUGxiy3Vn ++ rm /tmp/tmp.KjxkTTOu4I /tmp/tmp.VOUGxiy3Vn ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
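wait_for_password_propagation returns early here because dual passwords (keeping the old password valid during rotation) only exist in MySQL 8.0 and later. Only that early-return branch executes in this 5.7 run, so the rest is sketched as an assumption; IMAGE_PXC stands in for whatever variable the harness matches the image tag against:

    wait_for_password_propagation() {
        local secret=$1 user=$2
        local max_retry=240
        local root_pass
        root_pass=$(getSecretData "$secret" root)
        if [[ $IMAGE_PXC =~ 5\.7 ]]; then
            echo "Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it!"
            return
        fi
        # assumed 8.0+ behaviour: poll up to max_retry times until the old
        # password stops being accepted, i.e. the rotation has fully propagated
    }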
.+ sleep 5 [iterations 0-5 elided: the loop re-checks {.status.state} every 5 seconds and pxc/some-name stays "initializing"] + echo -n .
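Every credential check in this section funnels through compare_mysql_cmd: run the query with the candidate password, capture the output, and diff it against a golden .sql file, preferring a -57 variant on 5.7 when one exists (none does for select-4 above, so the stock file is used). A sketch; test_dir, tmp_dir, the output capture, and the empty-output handling are assumptions based on the paths visible in the trace:

    compare_mysql_cmd() {
        local command_id=$1 command=$2 uri=$3 postfix=$4
        local expected_result="${test_dir}/compare/${command_id}${postfix}.sql"
        if [[ $IMAGE_PXC =~ 5\.7 && -f ${test_dir}/compare/${command_id}-57${postfix}.sql ]]; then
            expected_result="${test_dir}/compare/${command_id}-57${postfix}.sql"
        fi
        run_mysql "$command" "$uri" >"${tmp_dir}/${command_id}.sql"
        [ -s "${tmp_dir}/${command_id}.sql" ]     # treat empty output as a failure
        diff -u "$expected_result" "${tmp_dir}/${command_id}.sql"
    }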
.+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KIgKem2D7g +++ mktemp ++ local LAST_ERR=/tmp/tmp.fFNkHt42iO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KIgKem2D7g ++ cat /tmp/tmp.fFNkHt42iO ++ rm /tmp/tmp.KIgKem2D7g /tmp/tmp.fFNkHt42iO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xrCzkl6caN +++ mktemp ++ local LAST_ERR=/tmp/tmp.rUXy8iRePK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xrCzkl6caN ++ cat /tmp/tmp.rUXy8iRePK ++ rm /tmp/tmp.xrCzkl6caN /tmp/tmp.rUXy8iRePK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oanneqjDOM +++ mktemp ++ local LAST_ERR=/tmp/tmp.iVoInfks2k ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oanneqjDOM ++ cat /tmp/tmp.iVoInfks2k ++ rm /tmp/tmp.oanneqjDOM /tmp/tmp.iVoInfks2k ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Csn5ES7CZI +++ mktemp ++ local LAST_ERR=/tmp/tmp.GhjxAvAO5O ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Csn5ES7CZI ++ cat /tmp/tmp.GhjxAvAO5O ++ rm /tmp/tmp.Csn5ES7CZI /tmp/tmp.GhjxAvAO5O ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.dpTFRD5xMR ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.TkM2x5ZlOj +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.dpTFRD5xMR +++++ cat /tmp/tmp.TkM2x5ZlOj +++++ rm /tmp/tmp.dpTFRD5xMR /tmp/tmp.TkM2x5ZlOj +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.jLoLPnRsfX ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.COcKyxs4KW +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.jLoLPnRsfX +++++ cat /tmp/tmp.COcKyxs4KW +++++ rm /tmp/tmp.jLoLPnRsfX /tmp/tmp.COcKyxs4KW +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local 
LAST_OUT=/tmp/tmp.KI8xiNK6lw +++ mktemp ++ local LAST_ERR=/tmp/tmp.veo2Jh7H2m ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KI8xiNK6lw ++ cat /tmp/tmp.veo2Jh7H2m ++ rm /tmp/tmp.KI8xiNK6lw /tmp/tmp.veo2Jh7H2m ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.l85joqdiz2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ud3p2yOUEn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.l85joqdiz2 ++ cat /tmp/tmp.ud3p2yOUEn ++ rm /tmp/tmp.l85joqdiz2 /tmp/tmp.ud3p2yOUEn ++ return 0 + client_pod=pxc-client-857d976497-6qfkm + wait_pod pxc-client-857d976497-6qfkm + local pod=pxc-client-857d976497-6qfkm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-6qfkm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-6qfkm condition met waiting for pod/pxc-client-857d976497-6qfkm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.O4Ec8syMdJ/select-4.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql /tmp/tmp.O4Ec8syMdJ/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.5aLPLiEbZX ++ mktemp + local LAST_ERR=/tmp/tmp.XhbCAvWprG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5aLPLiEbZX secret/my-cluster-secrets patched + cat /tmp/tmp.XhbCAvWprG + rm /tmp/tmp.5aLPLiEbZX /tmp/tmp.XhbCAvWprG + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ifkf9ZKU9d +++ mktemp ++ local LAST_ERR=/tmp/tmp.D6iq1CIiU0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ifkf9ZKU9d ++ cat /tmp/tmp.D6iq1CIiU0 ++ rm /tmp/tmp.ifkf9ZKU9d /tmp/tmp.D6iq1CIiU0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Mw6tqMJmIe +++ mktemp ++ local LAST_ERR=/tmp/tmp.Hlv4RJceqQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Mw6tqMJmIe ++ cat /tmp/tmp.Hlv4RJceqQ ++ rm /tmp/tmp.Mw6tqMJmIe /tmp/tmp.Hlv4RJceqQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
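run_mysql issues the query from a shared client pod rather than from the database pods: it resolves the pxc-client deployment's pod, waits for it to become Ready, then execs the mysql client with the supplied URI. The "Defaulted container" notice in the trace shows the exec omits -c, so kubectl picks the pod's first container. The exact mysql invocation below is an assumption:

    run_mysql() {
        local command=$1 uri=$2
        local client_pod
        client_pod=$(kubectl get pods --selector=name=pxc-client \
            -o 'jsonpath={.items[].metadata.name}')
        wait_pod "$client_pod"        # blocks until Ready; max_retry=480 per the trace
        # no -c flag: kubectl defaults to the pod's first container (pxc-client)
        kubectl exec "$client_pod" -- bash -c "echo \"$command\" | mysql -sN $uri"
    }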
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7E3UTFyFfY +++ mktemp ++ local LAST_ERR=/tmp/tmp.g9we333Xzw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7E3UTFyFfY ++ cat /tmp/tmp.g9we333Xzw ++ rm /tmp/tmp.7E3UTFyFfY /tmp/tmp.g9we333Xzw ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fIjWCGrtbu +++ mktemp ++ local LAST_ERR=/tmp/tmp.vlEuti0VTp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fIjWCGrtbu ++ cat /tmp/tmp.vlEuti0VTp ++ rm /tmp/tmp.fIjWCGrtbu /tmp/tmp.vlEuti0VTp ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.7quOmt77Rq ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.VxbSjoq340 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.7quOmt77Rq +++++ cat /tmp/tmp.VxbSjoq340 +++++ rm /tmp/tmp.7quOmt77Rq /tmp/tmp.VxbSjoq340 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ljP1DtscbF ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.iU1ies54qG +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ljP1DtscbF +++++ cat /tmp/tmp.iU1ies54qG +++++ rm /tmp/tmp.ljP1DtscbF /tmp/tmp.iU1ies54qG +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1IVuBADhCq +++ mktemp ++ local LAST_ERR=/tmp/tmp.AnANrruq8u ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1IVuBADhCq ++ cat /tmp/tmp.AnANrruq8u ++ rm /tmp/tmp.1IVuBADhCq /tmp/tmp.AnANrruq8u ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW 
+ desc 'change secret name'
-----------------------------------------------------------------------------------
change secret name
-----------------------------------------------------------------------------------
+ kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}'
perconaxtradbcluster.pxc.percona.com/some-name patched
+ sleep 30
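Switching the cluster to a different Secret is a one-field merge patch on the custom resource; everything that follows is the operator reconciling system users from the new Secret. Sketch, using the names from this run:

    kubectl patch pxc some-name --type merge \
      --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}'
    # expect .status.state to drop back to "initializing" while
    # credentials are re-synced, hence the longer sleep here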
+ wait_cluster_consistency some-name 3 2
-----------------------------------------------------------------------------------
wait cluster consistency
-----------------------------------------------------------------------------------
waiting for pxc/some-name to be ready........................
(.status.state polled every 5s: initializing for 24 polls, about 2 minutes, ready on the 25th)
+ [[ ready == \r\e\a\d\y ]]
+ [[ 3 == \3 ]]
+ [[ 2 == \2 ]]
(checked .status.pxc.ready=3 and .status.proxysql.ready=2; proxy engine: proxysql)
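wait_cluster_consistency is, as the trace shows, a bounded poll over three jsonpath fields. A condensed re-implementation of the visible logic, not the helper's exact source; 300 retries at 5s intervals as in the trace:

    cluster=some-name; size=3; proxy=proxysql; proxy_size=2
    for ((i = 0; i < 300; i++)); do
        state=$(kubectl get pxc "$cluster" -o 'jsonpath={.status.state}')
        pxc_ready=$(kubectl get pxc "$cluster" -o 'jsonpath={.status.pxc.ready}')
        proxy_ready=$(kubectl get pxc "$cluster" -o "jsonpath={.status.$proxy.ready}")
        # consistent only when state is ready AND both pod counts match
        [[ $state == ready && $pxc_ready == "$size" && $proxy_ready == "$proxy_size" ]] && break
        echo -n .; sleep 5
    done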
+ desc 'test new operator'
-----------------------------------------------------------------------------------
test new operator
-----------------------------------------------------------------------------------
+ newpass=test-password2
++ echo -n test-password2
++ base64
+ newpassencrypted=dGVzdC1wYXNzd29yZDI=
+ patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI=
+ kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}'
secret/my-cluster-secrets-2 patched
+ sleep 15
+ wait_cluster_consistency some-name 3 2
-----------------------------------------------------------------------------------
wait cluster consistency
-----------------------------------------------------------------------------------
waiting for pxc/some-name to be ready...
(.status.state: initializing for 3 polls, then ready; .status.pxc.ready=3, .status.proxysql.ready=2, proxy engine: proxysql)
+ sleep 20
+ compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\'''
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4-57.sql ]]
+ run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\'''
+ client_pod=pxc-client-857d976497-6qfkm
.Ok
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql /tmp/tmp.O4Ec8syMdJ/select-4.sql
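compare_mysql_cmd picks its expected-result file by matching the PXC image tag against version regexes and falling back to the unsuffixed file when no versioned one exists. Only the 5.7 branch and the select-4-57.sql probe are visible in this trace, so the 8.0/8.4 suffixes below are assumptions by analogy:

    img=perconalab/percona-xtradb-cluster-operator:main-pxc5.7
    expected=$tests_dir/users/compare/select-4.sql      # $tests_dir: illustrative
    if   [[ $img =~ 8\.4 && -f ${expected%.sql}-84.sql ]]; then expected=${expected%.sql}-84.sql
    elif [[ $img =~ 8\.0 && -f ${expected%.sql}-80.sql ]]; then expected=${expected%.sql}-80.sql
    elif [[ $img =~ 5\.7 && -f ${expected%.sql}-57.sql ]]; then expected=${expected%.sql}-57.sql
    fi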
++ getSecretData my-cluster-secrets-2 root
++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}'
++ base64 --decode
+ newpass=']pb)a8p8MZ3qy&D[o'
+ desc 'test new users sync'
-----------------------------------------------------------------------------------
test new users sync
-----------------------------------------------------------------------------------
+ run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\'']pb)a8p8MZ3qy&D[o'\'';' '-h some-name-pxc -uroot -p'\'']pb)a8p8MZ3qy&D[o'\'''
+ client_pod=pxc-client-857d976497-6qfkm
.Ok
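getSecretData is a thin wrapper around kubectl's go-template output plus base64 --decode; the decoded root password is then spliced, single-quoted, into both the SQL text and the -p argument:

    root_pass=$(kubectl get secrets/my-cluster-secrets-2 \
        --template='{{.data.root}}' | base64 --decode)
    # passwords containing shell metacharacters (like ']pb)a8p8MZ3qy&D[o')
    # survive only if kept inside single quotes downstream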
+ sleep 40
+ compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\'']pb)a8p8MZ3qy&D[o'\'''
+ run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\'']pb)a8p8MZ3qy&D[o'\'''
+ client_pod=pxc-client-857d976497-6qfkm
.Ok
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql /tmp/tmp.O4Ec8syMdJ/select-4.sql
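run_mysql resolves the client pod by label and execs mysql inside it; that is how both the CREATE USER and the verification SELECT run. A minimal sketch of that flow; the exact mysql flags are an assumption, and -c pxc-client sidesteps the "Defaulted container" notice seen in the trace:

    pod=$(kubectl get pods --selector=name=pxc-client \
        -o 'jsonpath={.items[].metadata.name}')
    kubectl exec "$pod" -c pxc-client -- \
        mysql -h some-name-proxysql -utestsync -p']pb)a8p8MZ3qy&D[o' \
        -e 'SHOW TABLES;'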
++ getSecretData internal-some-name operator
++ kubectl get secrets/internal-some-name '--template={{.data.operator}}'
++ base64 --decode
+ pass=test-password2
+ desc 'check secret without operator'
-----------------------------------------------------------------------------------
check secret without operator
-----------------------------------------------------------------------------------
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/conf/secrets.yml
secret/my-cluster-secrets-2 configured
Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
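The warning is expected here: the Secret was previously modified imperatively, so it lacks the last-applied-configuration annotation that kubectl apply uses for its three-way merge, and kubectl patches the annotation in automatically. Creating the object with --save-config (values illustrative) avoids the warning on later applies:

    kubectl create secret generic my-cluster-secrets-2 \
        --from-literal=operator=test-password2 --save-config
    # or manage the Secret with 'kubectl apply -f secrets.yml' from the start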
+ sleep 15
+ compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\'''
+ run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\'''
+ client_pod=pxc-client-857d976497-6qfkm
.Ok
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-4.sql /tmp/tmp.O4Ec8syMdJ/select-4.sql
+ newpass=test-password2
++ echo -n test-password2
++ base64
+ newpassencrypted=dGVzdC1wYXNzd29yZDI=
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/conf/some-name.yml
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/conf/some-name.yml
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#'
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#'
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2234-c49d4810#'
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#'
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#'
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-12977~
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #'
+ kubectl apply -f -
perconaxtradbcluster.pxc.percona.com/some-name configured
+ sleep 15
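apply_config/cat_config rewrite the manifest in flight: a chain of sed expressions pins the apiVersion and swaps every image to the build under test before piping the result to kubectl apply. A trimmed sketch of the same pipeline, covering a subset of the substitutions above:

    cat e2e-tests/users/conf/some-name.yml \
      | sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
      | sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' \
      | sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' \
      | kubectl apply -f -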
+ wait_cluster_consistency some-name 3 3
-----------------------------------------------------------------------------------
wait cluster consistency
-----------------------------------------------------------------------------------
waiting for pxc/some-name to be ready........................
(.status.state polled every 5s: initializing through poll 24; on poll 25 state was ready, but .status.pxc.ready came back empty rather than 3, so the wait continued)
+ [[ ready == \r\e\a\d\y ]]
+ [[ '' == \3 ]]
(polling resumed: state back to initializing from the next poll onward, still waiting at this point in the log)
+ [[ initializing == \r\e\a\d\y ]]
+ echo -n .
.+ sleep 5 + [[ 46 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.89WH9Ov117 +++ mktemp ++ local LAST_ERR=/tmp/tmp.xFIpMoABzQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.89WH9Ov117 ++ cat /tmp/tmp.xFIpMoABzQ ++ rm /tmp/tmp.89WH9Ov117 /tmp/tmp.xFIpMoABzQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 47 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DS3Pi3bJSk +++ mktemp ++ local LAST_ERR=/tmp/tmp.chAB8GwGof ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DS3Pi3bJSk ++ cat /tmp/tmp.chAB8GwGof ++ rm /tmp/tmp.DS3Pi3bJSk /tmp/tmp.chAB8GwGof ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 48 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tB0pXjTmDz +++ mktemp ++ local LAST_ERR=/tmp/tmp.xOQaU9qiPf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tB0pXjTmDz ++ cat /tmp/tmp.xOQaU9qiPf ++ rm /tmp/tmp.tB0pXjTmDz /tmp/tmp.xOQaU9qiPf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 49 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OqKdcCQpPx +++ mktemp ++ local LAST_ERR=/tmp/tmp.gKjct1JOf0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OqKdcCQpPx ++ cat /tmp/tmp.gKjct1JOf0 ++ rm /tmp/tmp.OqKdcCQpPx /tmp/tmp.gKjct1JOf0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
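The loop condensed above is the suite's generic readiness poll: every five seconds it reads the CR's '.status.state' via a jsonpath query, prints a dot, and bails out after 300 iterations. Stripped of the kubectl_bin/mktemp capture wrappers, a minimal sketch of the same pattern (cluster name taken from the trace) looks like this; the real loop's final iteration resumes below.

# Minimal readiness poll, mirroring the trace's 300 x 5s bound.
i=0
until [[ "$(kubectl get pxc some-name -o 'jsonpath={.status.state}')" == "ready" ]]; do
  echo -n .
  sleep 5
  i=$((i + 1))
  if [[ $i -ge 300 ]]; then
    echo "timeout waiting for pxc/some-name to become ready" >&2
    exit 1
  fi
done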
.+ sleep 5 + [[ 50 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HqL1p7FEM7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.yMB7DUxYDE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HqL1p7FEM7 ++ cat /tmp/tmp.yMB7DUxYDE ++ rm /tmp/tmp.HqL1p7FEM7 /tmp/tmp.yMB7DUxYDE ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YTZfKo5jZc +++ mktemp ++ local LAST_ERR=/tmp/tmp.X1xPLZrOFs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YTZfKo5jZc ++ cat /tmp/tmp.X1xPLZrOFs ++ rm /tmp/tmp.YTZfKo5jZc /tmp/tmp.X1xPLZrOFs ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.3TD8uxCwBA ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.YVs7PFJwI7 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.3TD8uxCwBA +++++ cat /tmp/tmp.YVs7PFJwI7 +++++ rm /tmp/tmp.3TD8uxCwBA /tmp/tmp.YVs7PFJwI7 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Lnuamzg0wV +++ mktemp ++ local LAST_ERR=/tmp/tmp.aWbMlTjgQz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Lnuamzg0wV ++ cat /tmp/tmp.aWbMlTjgQz ++ rm /tmp/tmp.Lnuamzg0wV /tmp/tmp.aWbMlTjgQz ++ return 0 + [[ 3 == \3 ]] + echo + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + check_generation 2 haproxy some-name + local generation=2 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3nlCaaVluk +++ mktemp ++ local LAST_ERR=/tmp/tmp.qkcIlUNo5R ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3nlCaaVluk ++ cat /tmp/tmp.qkcIlUNo5R ++ rm /tmp/tmp.3nlCaaVluk /tmp/tmp.qkcIlUNo5R ++ return 0 + current_generation=2 + [[ 2 != \2 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.8FfhSMwoxo ++ mktemp + local LAST_ERR=/tmp/tmp.7mwmKpEy3G + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 
+ set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8FfhSMwoxo secret/my-cluster-secrets patched + cat /tmp/tmp.7mwmKpEy3G + rm /tmp/tmp.8FfhSMwoxo /tmp/tmp.7mwmKpEy3G + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready
[log condensed: iterations 0-7 of the same 5-second '{.status.state}' poll all returned "initializing"; per-call capture boilerplate omitted.]
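The wait above was triggered by the patch_secret call earlier in the trace: Kubernetes Secret data is base64-encoded, and dGVzdC1wYXNzd29yZDI= decodes to test-password2, the same password used for the -umonitor login further down. A sketch of that rotation step, assuming the Secret and key names from the trace:

# Rotate the monitor password by patching the base64-encoded Secret value,
# then give the operator a head start before polling for consistency.
new_pass_b64=$(echo -n 'test-password2' | base64)   # => dGVzdC1wYXNzd29yZDI=
kubectl patch secret my-cluster-secrets \
  -p="{\"data\":{\"monitor\": \"${new_pass_b64}\"}}"
sleep 15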
.+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e9gNNptsXE +++ mktemp ++ local LAST_ERR=/tmp/tmp.lL3L9LdoPn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.e9gNNptsXE ++ cat /tmp/tmp.lL3L9LdoPn ++ rm /tmp/tmp.e9gNNptsXE /tmp/tmp.lL3L9LdoPn ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.47SmeRQRVn +++ mktemp ++ local LAST_ERR=/tmp/tmp.cY1kbvtg7I ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.47SmeRQRVn ++ cat /tmp/tmp.cY1kbvtg7I ++ rm /tmp/tmp.47SmeRQRVn /tmp/tmp.cY1kbvtg7I ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.NkD91x1TAg ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.DDReleMQl2 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.NkD91x1TAg +++++ cat /tmp/tmp.DDReleMQl2 +++++ rm /tmp/tmp.NkD91x1TAg /tmp/tmp.DDReleMQl2 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZlokqhIgEu +++ mktemp ++ local LAST_ERR=/tmp/tmp.KaaunCLuum ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZlokqhIgEu ++ cat /tmp/tmp.KaaunCLuum ++ rm /tmp/tmp.ZlokqhIgEu /tmp/tmp.KaaunCLuum ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-3-57.sql ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1F9jtdfrHb +++ mktemp ++ local LAST_ERR=/tmp/tmp.5pdEwEJotK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 
'!=' 0 ']' ++ break ++ cat /tmp/tmp.1F9jtdfrHb ++ cat /tmp/tmp.5pdEwEJotK ++ rm /tmp/tmp.1F9jtdfrHb /tmp/tmp.5pdEwEJotK ++ return 0 + client_pod=pxc-client-857d976497-6qfkm + wait_pod pxc-client-857d976497-6qfkm + local pod=pxc-client-857d976497-6qfkm + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-6qfkm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-6qfkm condition met waiting for pod/pxc-client-857d976497-6qfkm to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.O4Ec8syMdJ/select-3.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2234/e2e-tests/users/compare/select-3.sql /tmp/tmp.O4Ec8syMdJ/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + check_generation 3 haproxy some-name + local generation=3 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.k6iLlsse4I +++ mktemp ++ local LAST_ERR=/tmp/tmp.k944aovQnX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.k6iLlsse4I ++ cat /tmp/tmp.k944aovQnX ++ rm /tmp/tmp.k6iLlsse4I /tmp/tmp.k944aovQnX ++ return 0 + current_generation=3 + [[ 3 != \3 ]] + destroy users-12977 + local namespace=users-12977 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v 'the object has been modified' ++ get_operator_pod + grep -v 'get backup status: Job.batch' ++ local label_prefix=app.kubernetes.io/ + sort -u + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + tee /tmp/tmp.O4Ec8syMdJ/operator.log + grep -v level=info +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.vvrZabCgdl +++ mktemp ++ local LAST_ERR=/tmp/tmp.w3RMmhi38P ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vvrZabCgdl ++ cat /tmp/tmp.w3RMmhi38P ++ rm /tmp/tmp.vvrZabCgdl /tmp/tmp.w3RMmhi38P ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-64f4d94dcc-tsb7w ++ mktemp + local LAST_OUT=/tmp/tmp.lTkdsYN8i2 ++ mktemp + local LAST_ERR=/tmp/tmp.dWedUIvCgY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-64f4d94dcc-tsb7w + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lTkdsYN8i2 + cat /tmp/tmp.dWedUIvCgY + rm /tmp/tmp.lTkdsYN8i2 /tmp/tmp.dWedUIvCgY + return 0 
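check_generation, used above with expected values 2 and then 3, asserts that the HAProxy StatefulSet was rolled exactly once per password change: metadata.generation increments each time the StatefulSet spec is updated, so an unexpected value means the operator restarted the proxy more (or fewer) times than the test intended. A sketch of the check, with names from the trace; the collected operator log follows it.

# Assert the StatefulSet spec was updated the expected number of times.
expected_generation=3
current_generation=$(kubectl get statefulset some-name-haproxy \
  -o 'jsonpath={.metadata.generation}')
if [[ "$current_generation" != "$expected_generation" ]]; then
  echo "expected generation $expected_generation, got $current_generation" >&2
  exit 1
fi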
2025-11-28T12:29:59.172Z INFO setup Manager starting up {"gitCommit": "c49d48109f1d79a254c1c926389a6948743db079", "gitBranch": "PR-2234-c49d4810", "buildTime": "2025-11-28T10:14:57Z", "goVersion": "go1.25.4", "os": "linux", "arch": "amd64"} 2025-11-28T12:29:59.172Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.13-gke.1454000"} 2025-11-28T12:29:59.175Z INFO setup Registering Components. 2025-11-28T12:30:00.064Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2025-11-28T12:30:00.064Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-11-28T12:30:00.064Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-11-28T12:30:00.064Z INFO controller-runtime.metrics Starting metrics server 2025-11-28T12:30:00.064Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2025-11-28T12:30:00.064Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-11-28T12:30:00.064Z INFO controller-runtime.webhook Starting webhook server 2025-11-28T12:30:00.064Z INFO setup Starting the Cmd. 2025-11-28T12:30:00.064Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-11-28T12:30:00.165Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 2025-11-28T12:30:00.196Z DEBUG events percona-xtradb-cluster-operator-64f4d94dcc-tsb7w_31164c71-b56a-44f9-9fee-3aacbba18d7b became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"59d91524-5a75-42c8-9e02-80a847faebaf","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1764333000188719009"}, "reason": "LeaderElection"} 2025-11-28T12:30:00.196Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2025-11-28T12:30:00.197Z INFO Starting EventSource {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-11-28T12:30:00.197Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-11-28T12:30:00.197Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.Secret"} 2025-11-28T12:30:00.197Z INFO Starting EventSource {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-11-28T12:30:00.298Z INFO Starting Controller {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup"} 2025-11-28T12:30:00.298Z INFO Starting Controller {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster"} 2025-11-28T12:30:00.298Z INFO Starting Controller {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore"} 2025-11-28T12:30:00.298Z INFO Starting workers {"controller": 
"pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "worker count": 1} 2025-11-28T12:30:00.298Z INFO Starting workers {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "worker count": 1} 2025-11-28T12:30:00.298Z INFO Starting workers {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "worker count": 1} 2025-11-28T12:30:38.650Z INFO Set CR version {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "72538881-531e-46bd-9951-ad76732fefb1", "version": "1.19.0"} 2025-11-28T12:30:38.784Z INFO User secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "72538881-531e-46bd-9951-ad76732fefb1", "secrets": "my-cluster-secrets"} 2025-11-28T12:30:39.009Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "72538881-531e-46bd-9951-ad76732fefb1", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-11-28T12:30:39.558Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "72538881-531e-46bd-9951-ad76732fefb1", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-28T12:30:39.841Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "72538881-531e-46bd-9951-ad76732fefb1", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-28T12:30:39.987Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "72538881-531e-46bd-9951-ad76732fefb1", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-28T12:30:40.086Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "72538881-531e-46bd-9951-ad76732fefb1", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-28T12:30:40.206Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "72538881-531e-46bd-9951-ad76732fefb1", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-28T12:30:40.342Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "72538881-531e-46bd-9951-ad76732fefb1", "object": "some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-28T12:30:41.471Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "a5bdb2e7-a757-4584-86b2-5c6ebe991e5e", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-11-28T12:30:41.494Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "a5bdb2e7-a757-4584-86b2-5c6ebe991e5e", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-11-28T12:31:52.914Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "5ad211f0-4a4a-4dad-a41f-2cfa87d3f33d", "user": "operator"} 2025-11-28T12:31:52.945Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "5ad211f0-4a4a-4dad-a41f-2cfa87d3f33d", "user": "monitor"} 2025-11-28T12:31:52.977Z INFO User monitor: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "5ad211f0-4a4a-4dad-a41f-2cfa87d3f33d"} 2025-11-28T12:31:53.008Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "5ad211f0-4a4a-4dad-a41f-2cfa87d3f33d", "user": "xtrabackup"} 2025-11-28T12:31:53.038Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "5ad211f0-4a4a-4dad-a41f-2cfa87d3f33d"} 2025-11-28T12:31:53.046Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "5ad211f0-4a4a-4dad-a41f-2cfa87d3f33d", "err": "get primary pxc pod: not found"} 2025-11-28T12:31:57.864Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "cba253f6-8964-470f-8c2b-6eee9bcc1771", "err": "get primary pxc pod: not found"} 2025-11-28T12:32:02.998Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "b5aa6ae8-b141-4630-832d-be361a272eef", "err": "get primary pxc pod: not found"} 2025-11-28T12:32:08.254Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "2c76c289-f80d-4a8b-b343-a32a95ddad29", "err": "get primary pxc pod: not found"} 2025-11-28T12:34:19.396Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "c1e41600-5b0f-4f0a-ac8e-e82116eb82f2", "user": "root"} 2025-11-28T12:34:19.432Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "c1e41600-5b0f-4f0a-ac8e-e82116eb82f2", "user": "replication"} 2025-11-28T12:34:19.487Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "c1e41600-5b0f-4f0a-ac8e-e82116eb82f2", "new version": "5.7.44-48-57"} 2025-11-28T12:34:21.334Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "c1e41600-5b0f-4f0a-ac8e-e82116eb82f2"} 2025-11-28T12:34:26.273Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "aba96427-84ad-4ba7-9f13-5c21feadd3af"} 2025-11-28T12:34:31.748Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "dc23d76a-07ce-45cd-a09d-3ef8a1c761f0"} 
[log condensed: from 2025-11-28T12:34:37Z through 2025-11-28T12:36:07Z the operator kept emitting near-identical DEBUG "PXC users synced with ProxySQL" entries for {"name":"some-name","namespace":"users-12977"}, one per ~5-second reconcile, differing only in reconcileID.] 2025-11-28T12:36:09.858Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "8bedbcf6-1fb5-4b34-8e6c-897c2ec179ae", "user": "root"} 2025-11-28T12:36:09.874Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "8bedbcf6-1fb5-4b34-8e6c-897c2ec179ae", "user": "root"} 2025-11-28T12:36:09.908Z INFO MySQL init secret created
{"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "8bedbcf6-1fb5-4b34-8e6c-897c2ec179ae", "secret": "some-name-mysql-init", "user": "root"} 2025-11-28T12:36:12.153Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "8bedbcf6-1fb5-4b34-8e6c-897c2ec179ae"} 2025-11-28T12:36:12.179Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "8bedbcf6-1fb5-4b34-8e6c-897c2ec179ae", "user": "root"} 2025-11-28T12:36:13.888Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "8bedbcf6-1fb5-4b34-8e6c-897c2ec179ae"} 2025-11-28T12:36:19.040Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "77025e6b-a105-47ff-8992-8dea829ecb4d"} 2025-11-28T12:36:24.586Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "9e5a4151-10ff-4032-b792-60086c8d37ef"} 2025-11-28T12:36:29.534Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "6d31362b-4c0b-4a25-bb6b-aea98a17be66"} 2025-11-28T12:36:30.728Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "95043cd8-de7b-4ae4-8fc4-60ec8e3a059d", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-28T12:36:30.852Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "95043cd8-de7b-4ae4-8fc4-60ec8e3a059d", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-28T12:36:33.007Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", 
"PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "95043cd8-de7b-4ae4-8fc4-60ec8e3a059d", "error": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-28T12:36:59.815Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "6869874b-9fec-439d-b4a5-69f13c779fe2", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-28T12:37:01.215Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "dcb53fe7-9208-4b1d-a3bc-a42dd374e656", "user": "proxyadmin"} 2025-11-28T12:37:01.215Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "dcb53fe7-9208-4b1d-a3bc-a42dd374e656", "user": "proxyadmin"} 2025-11-28T12:37:01.246Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "dcb53fe7-9208-4b1d-a3bc-a42dd374e656", "user": "proxyadmin"} 2025-11-28T12:37:01.270Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "dcb53fe7-9208-4b1d-a3bc-a42dd374e656", "user": "proxyadmin"} 2025-11-28T12:37:01.270Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "dcb53fe7-9208-4b1d-a3bc-a42dd374e656", "last-applied-secret": "09c805506dbfbb3fabd9ac2cd40c911c77d29097bd481e0b9f9bc5a827ea98d4"} 2025-11-28T12:37:01.278Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "dcb53fe7-9208-4b1d-a3bc-a42dd374e656", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-28T12:37:01.345Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "dcb53fe7-9208-4b1d-a3bc-a42dd374e656", "err": "get primary pxc pod: not found"} 2025-11-28T12:37:02.834Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "8ef3cabf-d4de-4741-9e93-61eae23e04bc", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-28T12:37:36.561Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "c1cb6eb3-2a33-4f2e-bb38-e3a20a1f2cb8", "err": "get primary pxc pod: not found"} 2025-11-28T12:37:50.271Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "b92103e8-ce3d-4276-a32c-95bcd0e679d5"} 2025-11-28T12:37:55.463Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "ffb6e2ea-8181-41cc-b2ef-229dd0af3c54"} 2025-11-28T12:37:55.823Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "6652c137-60c1-4558-afad-5281dc7608fa", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-28T12:37:55.875Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "6652c137-60c1-4558-afad-5281dc7608fa", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-28T12:37:57.642Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "6652c137-60c1-4558-afad-5281dc7608fa"} 2025-11-28T12:37:57.741Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "8c577551-77ba-4779-8d19-0c4e04fb3acf", "user": "xtrabackup"} 2025-11-28T12:37:57.753Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": 
"PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "8c577551-77ba-4779-8d19-0c4e04fb3acf", "user": "xtrabackup"} 2025-11-28T12:37:57.775Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "8c577551-77ba-4779-8d19-0c4e04fb3acf", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-11-28T12:37:57.794Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "8c577551-77ba-4779-8d19-0c4e04fb3acf", "user": "xtrabackup"} 2025-11-28T12:37:57.794Z INFO PXC pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "8c577551-77ba-4779-8d19-0c4e04fb3acf", "last-applied-secret": "06eff63ccef7b61206dbbd575d7c0d3c0d22a80c11b22ae596bfe270ddda2b4c"} 2025-11-28T12:37:57.797Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "8c577551-77ba-4779-8d19-0c4e04fb3acf", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-28T12:37:59.755Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "8c577551-77ba-4779-8d19-0c4e04fb3acf"} 2025-11-28T12:39:50.302Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "49eb4445-a392-4408-8613-72edf7994c2b", "primary name": "some-name-pxc-0.some-name-pxc.users-12977.svc.cluster.local"} 2025-11-28T12:39:55.503Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "51465323-d2f8-4611-b7e9-44c87128a717", "primary name": "some-name-pxc-0.some-name-pxc.users-12977.svc.cluster.local"} 2025-11-28T12:40:00.688Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "9d47de3e-f662-401e-8620-d20d7b805c05", "primary name": "some-name-pxc-0.some-name-pxc.users-12977.svc.cluster.local"} 2025-11-28T12:40:05.806Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "fd63173b-0f01-42f0-aaa0-c9ad0dcee516", "primary name": "some-name-pxc-0.some-name-pxc.users-12977.svc.cluster.local"} 2025-11-28T12:40:10.935Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "384adde1-653d-4410-a1ef-ba5cfb2bd367", "primary name": "some-name-pxc-0.some-name-pxc.users-12977.svc.cluster.local"} 2025-11-28T12:40:16.064Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "4b72a10c-bd64-49c8-b600-4868ce1e1e0e", "primary name": "some-name-pxc-0.some-name-pxc.users-12977.svc.cluster.local"} 2025-11-28T12:40:21.192Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "9168933c-3496-4ac3-aecd-e2ee4ea5c9b8", "primary name": "some-name-pxc-0.some-name-pxc.users-12977.svc.cluster.local"} 2025-11-28T12:40:26.318Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f803cd24-d6d1-4ea8-aca3-18f15746205d", "primary name": "some-name-pxc-0.some-name-pxc.users-12977.svc.cluster.local"} 2025-11-28T12:40:33.862Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "159da388-6f7a-42ea-9d83-7a6588c6248e"} 2025-11-28T12:40:38.996Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "17c0f15e-d30b-440e-be82-5909aee0f2ed"} 2025-11-28T12:40:39.799Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "69427938-b3e2-494a-8462-b2f12af0df2e", "user": "monitor"} 2025-11-28T12:40:39.810Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "69427938-b3e2-494a-8462-b2f12af0df2e", "user": "monitor"} 2025-11-28T12:40:39.866Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "69427938-b3e2-494a-8462-b2f12af0df2e", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-28T12:40:39.884Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "69427938-b3e2-494a-8462-b2f12af0df2e", "user": "monitor"} 2025-11-28T12:40:39.951Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "69427938-b3e2-494a-8462-b2f12af0df2e", "user": "monitor"} 2025-11-28T12:40:39.951Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "69427938-b3e2-494a-8462-b2f12af0df2e", "last-applied-secret": "2eae38749965876d6b3e029348d7ef11fea2daf3366b1669aba2dfd4af85086d"} 2025-11-28T12:40:39.959Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", 
"PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "69427938-b3e2-494a-8462-b2f12af0df2e", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-28T12:40:42.494Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "69427938-b3e2-494a-8462-b2f12af0df2e", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-28T12:41:37.523Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "6b07afb6-e197-4f66-a5e9-13481fcf4ace"} 2025-11-28T12:41:52.627Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "aeeaf7d3-6396-4612-8dd1-b6c60fff902f", "user": "operator"} 2025-11-28T12:41:52.639Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "aeeaf7d3-6396-4612-8dd1-b6c60fff902f", "user": "operator"} 2025-11-28T12:41:52.665Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "aeeaf7d3-6396-4612-8dd1-b6c60fff902f", "secret": "some-name-mysql-init", "user": "operator"} 2025-11-28T12:41:52.713Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "aeeaf7d3-6396-4612-8dd1-b6c60fff902f", "user": "operator"} 2025-11-28T12:41:52.713Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", 
"name": "some-name", "reconcileID": "aeeaf7d3-6396-4612-8dd1-b6c60fff902f", "last-applied-secret": "fd0a8b253f16d5c2198d0017a16e09901c51a6f2e59a85fab7bb843006274e05"} 2025-11-28T12:41:52.719Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "aeeaf7d3-6396-4612-8dd1-b6c60fff902f", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-28T12:41:56.008Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "5444d0a4-2052-4a4f-89fa-d9021d4e1a89", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-12977.svc.cluster.local:3306) to ProxySQL\n / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR (line:587) : Could not connect to the server. \n-- Please check the server connection parameters and status.\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' 
(using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-12977.svc.cluster.local:3306) to ProxySQL\n / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR (line:587) : Could not connect to the server. \n-- Please check the server connection parameters and status.\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' 
(using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-28T12:42:26.049Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f62748e9-6207-4498-a16d-e425aad0fb19"} 2025-11-28T12:42:30.443Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f363b10e-14fb-4648-8ccd-8019d9107cf4"} 2025-11-28T12:42:35.590Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "28675360-209b-4986-9018-23a0434396dc"} 2025-11-28T12:42:41.774Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "b2b2d599-6493-4224-b12b-3ebf4ac95bcd"} 2025-11-28T12:42:42.305Z INFO Created user secrets {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "secrets": "my-cluster-secrets-2"} 2025-11-28T12:42:42.305Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "user": "root"} 2025-11-28T12:42:42.319Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "user": "root"} 2025-11-28T12:42:42.338Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "secret": "some-name-mysql-init", "user": "root"} 2025-11-28T12:42:44.856Z DEBUG PXC users synced 
with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444"} 2025-11-28T12:42:44.880Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "user": "root"} 2025-11-28T12:42:44.880Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "user": "operator"} 2025-11-28T12:42:44.890Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "user": "operator"} 2025-11-28T12:42:44.908Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "secret": "some-name-mysql-init", "user": "operator"} 2025-11-28T12:42:44.931Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "user": "operator"} 2025-11-28T12:42:44.931Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "user": "monitor"} 2025-11-28T12:42:44.942Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "user": "monitor"} 2025-11-28T12:42:44.964Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-28T12:42:44.982Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", 
"name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "user": "monitor"} 2025-11-28T12:42:45.023Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "user": "monitor"} 2025-11-28T12:42:45.023Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "user": "xtrabackup"} 2025-11-28T12:42:45.033Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "user": "xtrabackup"} 2025-11-28T12:42:45.052Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-11-28T12:42:45.082Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "user": "xtrabackup"} 2025-11-28T12:42:45.082Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "user": "replication"} 2025-11-28T12:42:45.092Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "user": "replication"} 2025-11-28T12:42:45.112Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "secret": "some-name-mysql-init", "user": "replication"} 2025-11-28T12:42:45.131Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "user": "replication"} 2025-11-28T12:42:45.131Z INFO Password changed, updating user {"controller": 
"pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "user": "proxyadmin"} 2025-11-28T12:42:45.149Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "user": "proxyadmin"} 2025-11-28T12:42:45.171Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "user": "proxyadmin"} 2025-11-28T12:42:45.171Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "last-applied-secret": "0005a869f4194e84e8aca936b5340cd1212a53720adf84591e32e1b074413665"} 2025-11-28T12:42:45.172Z INFO PXC pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "last-applied-secret": "0005a869f4194e84e8aca936b5340cd1212a53720adf84591e32e1b074413665"} 2025-11-28T12:42:45.180Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-28T12:42:45.257Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-28T12:42:47.087Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "81655089-f1c7-4e0c-85d0-49b2a217e444", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-28T12:44:40.956Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "a38634c1-eaf3-49b1-b34b-11a4b6f6f9e1", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.204.24.47:33062: connect: connection refused"} 2025-11-28T12:44:46.091Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "b7c9d9c0-5864-4fb3-bcb6-96bb59543ec2", "primary name": "some-name-pxc-0.some-name-pxc.users-12977.svc.cluster.local"} 2025-11-28T12:44:51.269Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "922f5c70-dbca-4d53-ad24-0e7fa7f39c16", "primary name": "some-name-pxc-0.some-name-pxc.users-12977.svc.cluster.local"} 2025-11-28T12:44:56.414Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "edb3c4cc-c390-4be5-9798-b9abe857c1d7", "primary name": "some-name-pxc-0.some-name-pxc.users-12977.svc.cluster.local"} 2025-11-28T12:45:01.589Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "739a28de-e85e-4eca-86b6-4f4e53cdbbe8", "primary name": "some-name-pxc-0.some-name-pxc.users-12977.svc.cluster.local"} 2025-11-28T12:45:06.741Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "3a7c090d-df67-4088-babe-0174c9c1f7be", "primary name": "some-name-pxc-0.some-name-pxc.users-12977.svc.cluster.local"} 2025-11-28T12:45:11.934Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "2b7644c0-a6c1-4eff-b247-74bee1e8ebf4", "primary name": "some-name-pxc-0.some-name-pxc.users-12977.svc.cluster.local"} 2025-11-28T12:45:17.113Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "09f91aa9-7027-476e-a7d0-7c069913f15c", "primary name": "some-name-pxc-0.some-name-pxc.users-12977.svc.cluster.local"} 2025-11-28T12:45:22.256Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "61ffaf50-e62d-4b2c-a490-ae12d1f07e57", "primary name": "some-name-pxc-0.some-name-pxc.users-12977.svc.cluster.local"} 2025-11-28T12:45:30.204Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "5728b33c-ca42-4f28-87f8-87aad13ab015"} 2025-11-28T12:45:33.799Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "af165d33-4f55-4a51-a3d9-36daa8e95432", "user": "operator"} 2025-11-28T12:45:33.810Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "af165d33-4f55-4a51-a3d9-36daa8e95432", "user": "operator"} 2025-11-28T12:45:33.861Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "af165d33-4f55-4a51-a3d9-36daa8e95432", "secret": "some-name-mysql-init", "user": "operator"} 2025-11-28T12:45:33.912Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "af165d33-4f55-4a51-a3d9-36daa8e95432", "user": "operator"} 2025-11-28T12:45:33.913Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "af165d33-4f55-4a51-a3d9-36daa8e95432", 
"last-applied-secret": "70fa1b9bea88c5eaceb9d199fc48df42e1148b742dea7dd4292036d6e41d4ac8"} 2025-11-28T12:45:33.917Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "af165d33-4f55-4a51-a3d9-36daa8e95432", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-28T12:45:36.907Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "8b7b579f-ecb1-4ea7-a610-6509f30c4aec", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' 
(using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-12977.svc.' 
(using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-28T12:46:12.805Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "5a8d7b8c-4933-428c-a26f-5123c137d5b3"} 2025-11-28T12:46:17.383Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "5182571a-6bed-42bc-b245-4f030710fe80"} 2025-11-28T12:46:22.395Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "3f4f1494-f5f1-4c07-9805-08432abd60ae"} 2025-11-28T12:46:27.799Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "bd789793-c983-41b3-aee0-6250670624d9"} 2025-11-28T12:46:32.906Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "e990e7f4-ebe7-4cdb-bb71-86dd50c7ec1c"} 2025-11-28T12:46:37.918Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "a8803691-0ff3-4a62-a56f-9d1353fae55c"} 2025-11-28T12:46:44.204Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "2979ca69-490b-4c49-8b43-cd2d09386ac3"} 2025-11-28T12:46:48.903Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "64ebf3ed-06dd-4961-a65a-8da40cbf7c37"} 2025-11-28T12:46:54.129Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", 
"controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "aeef949d-3547-4ff2-aae6-01f0b7dff2f1"} 2025-11-28T12:46:59.400Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f6536e48-a2c7-493b-b59d-4234f6cc4b38"} 2025-11-28T12:47:04.586Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "a4f237b1-d0cb-487e-bbed-ac27513bd3ad"} 2025-11-28T12:47:09.601Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "8d574ab2-1116-49fc-9d4d-d3a0b281131a"} 2025-11-28T12:47:15.001Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "67989cac-355f-40df-8495-458759641d85"} 2025-11-28T12:47:20.288Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "8eca1945-9ade-42b3-9044-8264023e4695"} 2025-11-28T12:47:25.991Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "360e3dbe-7a7f-4656-b0f1-bb492d8018f7"} 2025-11-28T12:47:26.166Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f944235e-06e5-48ee-8cb0-58f307054439", "user": "root"} 2025-11-28T12:47:26.182Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f944235e-06e5-48ee-8cb0-58f307054439", "user": "root"} 2025-11-28T12:47:26.204Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f944235e-06e5-48ee-8cb0-58f307054439", "secret": "some-name-mysql-init", "user": "root"} 2025-11-28T12:47:28.848Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", 
"controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f944235e-06e5-48ee-8cb0-58f307054439"} 2025-11-28T12:47:28.871Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f944235e-06e5-48ee-8cb0-58f307054439", "user": "root"} 2025-11-28T12:47:28.871Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f944235e-06e5-48ee-8cb0-58f307054439", "user": "monitor"} 2025-11-28T12:47:28.881Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f944235e-06e5-48ee-8cb0-58f307054439", "user": "monitor"} 2025-11-28T12:47:28.899Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f944235e-06e5-48ee-8cb0-58f307054439", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-28T12:47:28.920Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f944235e-06e5-48ee-8cb0-58f307054439", "user": "monitor"} 2025-11-28T12:47:28.941Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f944235e-06e5-48ee-8cb0-58f307054439", "user": "monitor"} 2025-11-28T12:47:28.941Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f944235e-06e5-48ee-8cb0-58f307054439", "user": "xtrabackup"} 2025-11-28T12:47:28.952Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f944235e-06e5-48ee-8cb0-58f307054439", "user": "xtrabackup"} 2025-11-28T12:47:28.974Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f944235e-06e5-48ee-8cb0-58f307054439", "secret": 
"some-name-mysql-init", "user": "xtrabackup"} 2025-11-28T12:47:28.992Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f944235e-06e5-48ee-8cb0-58f307054439", "user": "xtrabackup"} 2025-11-28T12:47:28.992Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f944235e-06e5-48ee-8cb0-58f307054439", "user": "proxyadmin"} 2025-11-28T12:47:29.009Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f944235e-06e5-48ee-8cb0-58f307054439", "user": "proxyadmin"} 2025-11-28T12:47:29.026Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f944235e-06e5-48ee-8cb0-58f307054439", "user": "proxyadmin"} 2025-11-28T12:47:29.027Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f944235e-06e5-48ee-8cb0-58f307054439", "last-applied-secret": "9558af457b55d18e740a2371d028488931f295ea9aef7e45a12534889fc6bf8a"} 2025-11-28T12:47:29.027Z INFO PXC pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f944235e-06e5-48ee-8cb0-58f307054439", "last-applied-secret": "9558af457b55d18e740a2371d028488931f295ea9aef7e45a12534889fc6bf8a"} 2025-11-28T12:47:29.029Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f944235e-06e5-48ee-8cb0-58f307054439", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-28T12:47:29.093Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f944235e-06e5-48ee-8cb0-58f307054439", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-28T12:47:31.619Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, 
"namespace": "users-12977", "name": "some-name", "reconcileID": "f944235e-06e5-48ee-8cb0-58f307054439", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
2025-11-28T12:49:30.338Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "93dc91b2-7ab7-4a41-abef-173a358df57c", "primary name": "some-name-pxc-0.some-name-pxc.users-12977.svc.cluster.local"}
2025-11-28T12:49:35.489Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f072181e-4e8a-418f-a10e-f896d3df13ef", "primary name": "some-name-pxc-0.some-name-pxc.users-12977.svc.cluster.local"}
2025-11-28T12:49:40.637Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "e77e749e-f15b-4c63-864f-033acbde43f0", "primary name": "some-name-pxc-0.some-name-pxc.users-12977.svc.cluster.local"}
2025-11-28T12:49:45.864Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f6414e40-fe1e-4e88-9065-e59d10c51322", "primary name": "some-name-pxc-0.some-name-pxc.users-12977.svc.cluster.local"}
2025-11-28T12:49:50.990Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "3bfd29fc-ca3b-4b50-817a-47b78c0a65c4", "primary name": "some-name-pxc-0.some-name-pxc.users-12977.svc.cluster.local"}
2025-11-28T12:49:56.201Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "680c7cdb-5cce-42ee-af9c-9953d5cc414a", "primary name": "some-name-pxc-0.some-name-pxc.users-12977.svc.cluster.local"}
2025-11-28T12:50:01.359Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "f35fdc2c-5cf1-4806-a836-7744197d842f", "primary name": "some-name-pxc-0.some-name-pxc.users-12977.svc.cluster.local"}
2025-11-28T12:50:04.511Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "e5c40031-508a-46f9-aba4-1cb0eb3e11bc", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2025-11-28T12:50:04.553Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "e5c40031-508a-46f9-aba4-1cb0eb3e11bc", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"}
2025-11-28T12:50:04.594Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "e5c40031-508a-46f9-aba4-1cb0eb3e11bc", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"}
2025-11-28T12:50:04.669Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "e5c40031-508a-46f9-aba4-1cb0eb3e11bc", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"}
2025-11-28T12:50:04.783Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "e5c40031-508a-46f9-aba4-1cb0eb3e11bc", "object": "some-name-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"}
2025-11-28T12:50:08.272Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "e5c40031-508a-46f9-aba4-1cb0eb3e11bc", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.238.49:3306: connect: connection refused"}
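Note: both symptoms above describe the same transient state while the cluster switches to HAProxy: some-name-pxc-0.some-name-pxc.users-12977.svc.cluster.local does not yet resolve to a ready pod, and the just-created HAProxy service (apparently the 34.118.238.49:3306 address in the error) has no ready endpoints. The matching checks would be, as a sketch:

  kubectl -n users-12977 get pods -l app.kubernetes.io/instance=some-name -o wide
  kubectl -n users-12977 get endpoints some-name-haproxy some-name-haproxy-replicas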
{"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "7f899997-9d85-42df-8477-20e24015fc3b", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-11-28T12:50:16.557Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "7f899997-9d85-42df-8477-20e24015fc3b", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.238.49:3306: connect: connection refused"} 2025-11-28T12:52:37.750Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "59e96856-34a2-444d-a56e-918cb8b7e710", "user": "root"} 2025-11-28T12:52:37.768Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "59e96856-34a2-444d-a56e-918cb8b7e710", "user": "root"} 2025-11-28T12:52:37.792Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "59e96856-34a2-444d-a56e-918cb8b7e710", "secret": "some-name-mysql-init", "user": "root"} 2025-11-28T12:52:37.821Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "59e96856-34a2-444d-a56e-918cb8b7e710", "user": "root"} 2025-11-28T12:52:37.821Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "59e96856-34a2-444d-a56e-918cb8b7e710", "user": "operator"} 2025-11-28T12:52:37.832Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "59e96856-34a2-444d-a56e-918cb8b7e710", "user": "operator"} 2025-11-28T12:52:37.855Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "59e96856-34a2-444d-a56e-918cb8b7e710", "secret": "some-name-mysql-init", "user": "operator"} 2025-11-28T12:52:37.876Z INFO Internal secrets updated {"controller": "pxc-controller", 
"controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "59e96856-34a2-444d-a56e-918cb8b7e710", "user": "operator"} 2025-11-28T12:52:37.876Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "59e96856-34a2-444d-a56e-918cb8b7e710", "user": "monitor"} 2025-11-28T12:52:37.885Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "59e96856-34a2-444d-a56e-918cb8b7e710", "user": "monitor"} 2025-11-28T12:52:37.906Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "59e96856-34a2-444d-a56e-918cb8b7e710", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-28T12:52:37.929Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "59e96856-34a2-444d-a56e-918cb8b7e710", "user": "monitor"} 2025-11-28T12:52:37.929Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "59e96856-34a2-444d-a56e-918cb8b7e710", "user": "xtrabackup"} 2025-11-28T12:52:37.938Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "59e96856-34a2-444d-a56e-918cb8b7e710", "user": "xtrabackup"} 2025-11-28T12:52:37.960Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "59e96856-34a2-444d-a56e-918cb8b7e710", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-11-28T12:52:37.976Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "59e96856-34a2-444d-a56e-918cb8b7e710", "user": "xtrabackup"} 2025-11-28T12:52:37.976Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": 
"some-name", "reconcileID": "59e96856-34a2-444d-a56e-918cb8b7e710", "user": "replication"} 2025-11-28T12:52:37.986Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "59e96856-34a2-444d-a56e-918cb8b7e710", "user": "replication"} 2025-11-28T12:52:38.006Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "59e96856-34a2-444d-a56e-918cb8b7e710", "secret": "some-name-mysql-init", "user": "replication"} 2025-11-28T12:52:38.028Z INFO HAProxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "59e96856-34a2-444d-a56e-918cb8b7e710", "last-applied-secret": "fd0a8b253f16d5c2198d0017a16e09901c51a6f2e59a85fab7bb843006274e05"} 2025-11-28T12:52:38.028Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "59e96856-34a2-444d-a56e-918cb8b7e710", "user": "replication"} 2025-11-28T12:52:38.028Z INFO PXC pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "59e96856-34a2-444d-a56e-918cb8b7e710", "last-applied-secret": "fd0a8b253f16d5c2198d0017a16e09901c51a6f2e59a85fab7bb843006274e05"} 2025-11-28T12:52:38.031Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "59e96856-34a2-444d-a56e-918cb8b7e710", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-28T12:52:38.086Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "59e96856-34a2-444d-a56e-918cb8b7e710", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-28T12:55:10.483Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "416b9317-3d53-4079-ac60-f7f37271f1f5", "user": "monitor"} 2025-11-28T12:55:10.494Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": 
"PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "416b9317-3d53-4079-ac60-f7f37271f1f5", "user": "monitor"} 2025-11-28T12:55:10.516Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "416b9317-3d53-4079-ac60-f7f37271f1f5", "secret": "some-name-mysql-init", "user": "monitor"} 2025-11-28T12:55:10.538Z INFO HAProxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "416b9317-3d53-4079-ac60-f7f37271f1f5", "last-applied-secret": "e381f74dcf09bf4a29b507e43d6326e30d4f7c4980a1306a5d0733fa03a55a42"} 2025-11-28T12:55:10.538Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "416b9317-3d53-4079-ac60-f7f37271f1f5", "user": "monitor"} 2025-11-28T12:55:10.540Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-12977"}, "namespace": "users-12977", "name": "some-name", "reconcileID": "416b9317-3d53-4079-ac60-f7f37271f1f5", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:856 [mysql] 2025/11/28 12:55:27 packets.go:58 unexpected EOF -  }, -  { -  }, -  { -  }, -  }, +  }, +  "0005a869f4194e84e8aca936b5340cd1212a53720adf84591e32e1b07441366", -  "0005a869f4194e84e8aca936b5340cd1212a53720adf84591e32e1b074413665", +  "005a869f4194e84e8aca936b5340cd1212a53720adf84591e32e1b074413665", -  "09c805506dbfbb3fabd9ac2cd40c911c77d29097bd481e0b9f9bc5a827ea98d4", -  "0a8b253f16d5c2198d0017a16e09901c51a6f2e59a85fab7bb843006274e05", -  "2eae38749965876d6b3e029348d7ef11fea2daf3366b1669aba2dfd4af85086d", +  "2eae38749965876d6b3e029348d7ef11fea2daf3366b1669aba2dfd4af85086d", -  "457b55d18e740a2371d028488931f295ea9aef7e45a12534889fc6bf8a", -  "6eff63ccef7b61206dbbd575d7c0d3c0d22a80c11b22ae596bfe270ddda2b4c", -  "70fa1b9bea88c5eaceb9d199fc48df42e1148b742dea7dd4292036d6e41d4ac8", +  "70fa1b9bea88c5eaceb9d199fc48df42e1148b742dea7dd4292036d6e41d4ac8", +  "74", -  "9558a", +  "9558af457b55d18e740a2371d028488931f295ea9aef7e45a12534889fc6bf8a", -  Annotations: map[string]string{ +  Annotations: map[string]string{ +  APIVersion: "", -  APIVersion: "apps/v1", -  APIVersion: "apps/v1", -  APIVersion: "v1", -  Args: []string{"logrotate"}, +  AvailableReplicas: 0, -  AvailableReplicas: 2, -  AvailableReplicas: 3, +  "cf09bf4a29b507e43d6326e30d4f7c4980a1306a5d0733fa03a55a42", -  CollisionCount: &0, +  CollisionCount: nil, +  CreationTimestamp: v1.Time{}, -  CreationTimestamp: v1.Time{Time: s"2025-11-28 12:30:39 
+0000 UTC"}, -  CreationTimestamp: v1.Time{Time: s"2025-11-28 12:50:04 +0000 UTC"}, +  CurrentReplicas: 0, -  CurrentReplicas: 2, -  CurrentReplicas: 3, +  CurrentRevision: "", -  CurrentRevision: "some-name-haproxy-6545ff48dc", -  CurrentRevision: "some-name-haproxy-6bfcf8d65c", -  CurrentRevision: "some-name-proxysql-5bcbc45879", -  CurrentRevision: "some-name-proxysql-667db77b78", -  CurrentRevision: "some-name-proxysql-699977c9bc", -  CurrentRevision: "some-name-proxysql-6d4d7f4598", -  CurrentRevision: "some-name-proxysql-95997bf47", -  CurrentRevision: "some-name-proxysql-bdc6dd84c", -  CurrentRevision: "some-name-pxc-6ff96c4c5c", -  CurrentRevision: "some-name-pxc-bb7dd9584", -  CurrentRevision: "some-name-pxc-c77674768", -  CurrentRevision: "some-name-pxc-cd59b8975", +  "d0a8b253f16d5c2198d0017a16e09901c51a6f2e59a85fab7bb843006274e05", -  DefaultMode: &420, -  DefaultMode: &420, +  DefaultMode: nil, +  DefaultMode: nil, +  DeprecatedServiceAccount: "", -  DeprecatedServiceAccount: "default", +  DNSPolicy: "", -  DNSPolicy: "ClusterFirst", +  "e381", -  EnvFrom: []v1.EnvFromSource{{SecretRef: s"&SecretEnvSource{LocalObjectRefe"...}}, -  Env: []v1.EnvVar{ -  "fd0a8b253f16d5c2198d0017a16e09901c51a6f2e59a85fab7bb843006274e0", +  "fd0a8b253f16d5c2198d0017a16e09901c51a6f2e59a85fab7bb843006274e05", -  FieldsType: "FieldsV1", -  FieldsType: "FieldsV1", -  FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`..., -  FieldsV1: s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`..., +  Generation: 0, -  Generation: 1, -  Generation: 2, -  Generation: 3, -  Generation: 4, -  Generation: 5, -  Generation: 6, -  Generation: 7, -  Generation: 8, -  Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", -  Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", -  ImagePullPolicy: "Always", +  "last-applied-secret": "06eff63ccef7b61206dbbd575d7c0d3c0d22a80c11b22ae596bfe270ddda2b4c", +  "last-applied-secret": "09c805506dbfbb3fabd9ac2cd40c911c77d29097bd481e0b9f9bc5a827ea98d4", +  "last-applied-secret": "fd0a8b253f16d5c2198d0017a16e09901c51a6f2e59a85fab7bb843006274e05", +  ManagedFields: nil, -  ManagedFields: []v1.ManagedFieldsEntry{ -  Manager: "kube-controller-manager", -  Manager: "percona-xtradb-cluster-operator", -  {Name: "IS_LOGCOLLECTOR", Value: "yes"}, -  {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"}, -  Name: "logrotate", -  Name: "logs", -  {Name: "MONITOR_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, -  {Name: "POD_NAMESPASE", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, -  {Name: "POD_NAME", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, -  {Name: "SERVICE_TYPE", Value: "mysql"}, +  ObservedGeneration: 0, -  ObservedGeneration: 1, -  ObservedGeneration: 2, -  ObservedGeneration: 3, -  ObservedGeneration: 4, -  ObservedGeneration: 5, -  ObservedGeneration: 6, -  ObservedGeneration: 7, -  ObservedGeneration: 8, -  Operation: "Update", -  Operation: "Update", -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMDAwNWE4NjlmNDE5NGU4NGU4YWNhOTM2YjUzNDBjZDEyMTJhNTM3MjBhZGY4NDU5MWUzMmUxYjA3NDQxMzY2NSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMDAwNWE4NjlmNDE5NGU4NGU4YWNhOTM2YjUzNDBjZDEyMTJhNTM3MjBhZGY4NDU5MWUzMmUxYjA3NDQxMzY2NSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMDljODA1NTA2ZGJmYmIzZmFiZDlhYzJjZDQwYzkxMWM3N2QyOTA5N2JkNDgxZTBiOWY5YmM1YTgyN2VhOThkNCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMmVhZTM4NzQ5OTY1ODc2ZDZiM2UwMjkzNDhkN2VmMTFmZWEyZGFmMzM2NmIxNjY5YWJhMmRmZDRhZjg1MDg2ZCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMmVhZTM4NzQ5OTY1ODc2ZDZiM2UwMjkzNDhkN2VmMTFmZWEyZGFmMzM2NmIxNjY5YWJhMmRmZDRhZjg1MDg2ZCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNzBmYTFiOWJlYTg4YzVlYWNlYjlkMTk5ZmM0OGRmNDJlMTE0OGI3NDJkZWE3ZGQ0MjkyMDM2ZDZlNDFkNGFjOCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNzBmYTFiOWJlYTg4YzVlYWNlYjlkMTk5ZmM0OGRmNDJlMTE0OGI3NDJkZWE3ZGQ0MjkyMDM2ZDZlNDFkNGFjOCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOTU1OGFmNDU3YjU1ZDE4ZTc0MGEyMzcxZDAyODQ4ODkzMWYyOTVlYTlhZWY3ZTQ1YTEyNTM0ODg5ZmM2YmY4YSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZmQwYThiMjUzZjE2ZDVjMjE5OGQwMDE3YTE2ZTA5OTAxYzUxYTZmMmU1OWE4NWZhYjdiYjg0MzAwNjI3NGUwNSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZmQwYThiMjUzZjE2ZDVjMjE5OGQwMDE3YTE2ZTA5OTAxYzUxYTZmMmU1OWE4NWZhYjdiYjg0MzAwNjI3NGUwNSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6ImhhcHJveHkiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZmQwYThiMjUzZjE2ZDVjMjE5OGQwMDE3YTE2ZTA5OTAxYzUxYTZmMmU1OWE4NWZhYjdiYjg0MzAwNjI3NGUwNSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6ImhhcHJveHkiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZmQwYThiMjUzZjE2ZDVjMjE5OGQwMDE3YTE2ZTA5OTAxYzUxYTZmMmU1OWE4NWZhYjdiYjg0MzAwNjI3NGUwNSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6ImhhcHJveHkiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZTM4MWY3NGRjZjA5YmY0YTI5YjUwN2U0M2Q2MzI2ZTMwZDRmN2M0OTgwYTEzMDZhNWQwNzMzZmEwM2E1NWE0MiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6ImhhcHJveHkiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSJ9fSwic3BlYyI6eyJ2b2x1bWVzIjpbeyJuYW1lIjoiaGFwcm94eS1jdXN0b20iLCJjb25maWdNYXAi"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMDAwNWE4NjlmNDE5NGU4NGU4YWNhOTM2YjUzNDBjZDEyMTJhNTM3MjBhZGY4NDU5MWUzMmUxYjA3NDQxMzY2NSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMDAwNWE4NjlmNDE5NGU4NGU4YWNhOTM2YjUzNDBjZDEyMTJhNTM3MjBhZGY4NDU5MWUzMmUxYjA3NDQxMzY2NSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMDZlZmY2M2NjZWY3YjYxMjA2ZGJiZDU3NWQ3YzBkM2MwZDIyYTgwYzExYjIyYWU1OTZiZmUyNzBkZGRhMmI0YyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiMDZlZmY2M2NjZWY3YjYxMjA2ZGJiZDU3NWQ3YzBkM2MwZDIyYTgwYzExYjIyYWU1OTZiZmUyNzBkZGRhMmI0YyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOTU1OGFmNDU3YjU1ZDE4ZTc0MGEyMzcxZDAyODQ4ODkzMWYyOTVlYTlhZWY3ZTQ1YTEyNTM0ODg5ZmM2YmY4YSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOTU1OGFmNDU3YjU1ZDE4ZTc0MGEyMzcxZDAyODQ4ODkzMWYyOTVlYTlhZWY3ZTQ1YTEyNTM0ODg5ZmM2YmY4YSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOTU1OGFmNDU3YjU1ZDE4ZTc0MGEyMzcxZDAyODQ4ODkzMWYyOTVlYTlhZWY3ZTQ1YTEyNTM0ODg5ZmM2YmY4YSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMjM0LWM0OWQ0ODEwIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJsb2dzIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1sb2djb2xsZWN0b3IiLCJlbnZGcm9tIjpbeyJzZWNyZXRSZWYiOnsibmFtZSI6InNvbWUtbmFtZS1sb2ctY29sbGVjdG9yIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IkxPR19EQVRBX0RJUiIsInZhbHVlIjoiL3Zhci9saWIvbXlzcWwifSx7Im5hbWUiOiJQT0RfTkFNRVNQQVNFIiwidmFsdWVGcm9tIjp7ImZpZWxkUmVmIjp7ImZpZWxkUGF0aCI6Im1ldGFkYXRhLm5hbWVzcGFjZSJ9fX0seyJuYW1lIjoiUE9EX05BTUUiLCJ2YWx1ZUZyb20iOnsiZmllbGRSZWYiOnsiZmllbGRQYXRoIjoibWV0YWRhdGEubmFtZSJ9fX1dLCJyZXNvdXJjZXMiOnt9LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJkYXRhZGlyIiwibW91bnRQYXRoIjoiL3Zhci9saWIvbXlzcWwifV0sImltYWdlUHVsbFBvbGljeSI6IkFsd2F5cyJ9LHsibmFtZSI6ImxvZ3JvdGF0ZSIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOm1haW4tbG9nY29sbGVjdG9yIiwiYXJncyI6WyJsb2dyb3RhdGUiXSwiZW52IjpbeyJuYW1lIjoiU0VSVklDRV9UWVBFIiwidmFsdWUiOiJteXNxbCJ9LHsibmFtZSI6Ik1PTklUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJtb25pdG9yIn19fV0sInJlc291cmNlcyI6e30sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImRhdGFkaXIiLCJtb3VudF
BhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0s"... [end of a base64 value truncated: together with the previous line it decodes to the pxc StatefulSet spec (replicas, selector, pod template) whose template annotations include "last-applied-secret": "9558af457b55d18e740a2371d028488931f295ea9aef7e45a12534889fc6bf8a"], +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3Ii"... [base64 truncated: pxc spec with the same "last-applied-secret" value], +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3Ii"... [base64 truncated: pxc spec with "last-applied-secret": "fd0a8b253f16d5c2198d0017a16e09901c51a6f2e59a85fab7bb843006274e05"], -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3Ii"... [base64 truncated: pxc spec without a "last-applied-secret" annotation], -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3Ii"... [base64 truncated: proxysql spec, cut off before the differing annotations], +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3Ii"... [base64 truncated: proxysql spec, cut off before the differing annotations], +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3Ii"... [base64 truncated: proxysql spec with "last-applied-secret": "09c805506dbfbb3fabd9ac2cd40c911c77d29097bd481e0b9f9bc5a827ea98d4"], -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3Ii"... [base64 truncated: proxysql spec without a "last-applied-secret" annotation], +  PeriodSeconds: 0, -  PeriodSeconds: 10, +  PersistentVolumeClaimRetentionPolicy: nil, -  PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", +  Phase: "", -  Phase: "Pending", +  PodManagementPolicy: "", -  PodManagementPolicy: "OrderedReady", +  Protocol: "", -  Protocol: "TCP", +  ReadyReplicas: 0, -  ReadyReplicas: 2, -  ReadyReplicas: 3, +  Replicas: 0, -  Replicas: 2, -  Replicas: &2, +  Replicas: &2, -  Replicas: 3, -  Replicas: &3, +  Replicas: &3, +  ResourceVersion: "", -  ResourceVersion: "1764333075935231018", -  ResourceVersion: "1764333257553487019", -  ResourceVersion: "1764333409178143018", -  ResourceVersion: "1764333465601183018", -  ResourceVersion: "1764333476736799018", -  ResourceVersion: "1764333626561231019", -  ResourceVersion:
"1764333694894831018", -  ResourceVersion: "1764333741981935018", -  ResourceVersion: "1764333793017535018", -  ResourceVersion: "1764333922415855019", -  ResourceVersion: "1764333966212735018", -  ResourceVersion: "1764334170038447019", -  ResourceVersion: "1764334271925743007", -  ResourceVersion: "1764334352230831019", -  ResourceVersion: "1764334423952671007", +  RestartPolicy: "", -  RestartPolicy: "Always", -  RevisionHistoryLimit: &10, +  RevisionHistoryLimit: nil, +  SchedulerName: "", +  SchedulerName: "", -  SchedulerName: "default-scheduler", -  SchedulerName: "default-scheduler", +  SecurityContext: nil, -  SecurityContext: s"&PodSecurityContext{SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,SupplementalGroups:[],FSGroup:nil,RunAsGroup:nil,Sysctls:[]Sysctl{},WindowsOptions:nil,FSGroupChangePolicy:nil,SeccompProfile:nil,AppArmorProfile:nil,SupplementalGroupsPolicy:nil,SELinux"..., -  Subresource: "status", +  TerminationMessagePath: "", -  TerminationMessagePath: "/dev/termination-log", +  TerminationMessagePolicy: "", -  TerminationMessagePolicy: "File", -  Time: s"2025-11-28 12:30:39 +0000 UTC", -  Time: s"2025-11-28 12:31:15 +0000 UTC", -  Time: s"2025-11-28 12:34:17 +0000 UTC", -  Time: s"2025-11-28 12:36:30 +0000 UTC", -  Time: s"2025-11-28 12:36:49 +0000 UTC", -  Time: s"2025-11-28 12:37:01 +0000 UTC", -  Time: s"2025-11-28 12:37:45 +0000 UTC", -  Time: s"2025-11-28 12:37:55 +0000 UTC", -  Time: s"2025-11-28 12:37:56 +0000 UTC", -  Time: s"2025-11-28 12:37:57 +0000 UTC", -  Time: s"2025-11-28 12:40:26 +0000 UTC", -  Time: s"2025-11-28 12:40:40 +0000 UTC", -  Time: s"2025-11-28 12:41:34 +0000 UTC", -  Time: s"2025-11-28 12:41:52 +0000 UTC", -  Time: s"2025-11-28 12:42:21 +0000 UTC", -  Time: s"2025-11-28 12:42:45 +0000 UTC", -  Time: s"2025-11-28 12:43:13 +0000 UTC", -  Time: s"2025-11-28 12:45:22 +0000 UTC", -  Time: s"2025-11-28 12:45:33 +0000 UTC", -  Time: s"2025-11-28 12:46:06 +0000 UTC", -  Time: s"2025-11-28 12:47:29 +0000 UTC", -  Time: s"2025-11-28 12:49:30 +0000 UTC", -  Time: s"2025-11-28 12:50:04 +0000 UTC", -  Time: s"2025-11-28 12:51:11 +0000 UTC", -  Time: s"2025-11-28 12:52:32 +0000 UTC", -  Time: s"2025-11-28 12:52:38 +0000 UTC", -  Time: s"2025-11-28 12:53:43 +0000 UTC", -  TopologySpreadConstraints: nil, +  TopologySpreadConstraints: []v1.TopologySpreadConstraint{}, +  UID: "", -  UID: "30eb597e-49d6-4c44-9ec9-b2ffbbc06c67", -  UID: "5620a944-f02b-4885-9ae0-090f205078c4", -  UID: "6d4fd9b1-ee93-454a-b91b-85ae2aa16e4b", +  UpdatedReplicas: 0, -  UpdatedReplicas: 2, -  UpdatedReplicas: 3, +  UpdateRevision: "", -  UpdateRevision: "some-name-haproxy-6545ff48dc", -  UpdateRevision: "some-name-haproxy-6bfcf8d65c", -  UpdateRevision: "some-name-proxysql-5bcbc45879", -  UpdateRevision: "some-name-proxysql-667db77b78", -  UpdateRevision: "some-name-proxysql-699977c9bc", -  UpdateRevision: "some-name-proxysql-6d4d7f4598", -  UpdateRevision: "some-name-proxysql-95997bf47", -  UpdateRevision: "some-name-proxysql-bdc6dd84c", -  UpdateRevision: "some-name-pxc-548d4d9547", -  UpdateRevision: "some-name-pxc-6ff96c4c5c", -  UpdateRevision: "some-name-pxc-bb7dd9584", -  UpdateRevision: "some-name-pxc-c77674768", -  UpdateRevision: "some-name-pxc-cd59b8975", -  VolumeMode: &"Filesystem", +  VolumeMode: nil, -  VolumeMounts: []v1.VolumeMount{{Name: "datadir", MountPath: "/var/lib/mysql"}},   }    },    },    {    },    },    {    },    }, ""),    },    {    },    },    },    "0",    ... // 16 identical fields    ... // 16 identical fields    ... 
// 22 identical fields    ... // 2 identical fields    ... // 2 identical fields    ... // 2 identical fields    ... // 2 identical fields    ... // 3 identical elements    ... // 3 identical fields    ... // 3 identical fields    ... // 3 identical fields    ... // 4 identical fields    "5",    ... // 5 identical elements    ... // 5 identical fields    ... // 5 identical fields    ... // 5 identical fields    ... // 6 identical fields    ... // 6 identical fields    ... // 7 identical fields    ... // 8 identical fields    ... // 9 identical fields    ... // 9 identical fields    AccessModes: nil,    ActiveDeadlineSeconds: nil,    Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "haproxy", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},    Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},    Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},    Annotations: map[string]string{    Args: {"haproxy"},    Args: {"mysqld"},    Args: {"proxysql", "-f", "-c", "/etc/proxysql/proxysql.cnf", ...},    AutomountServiceAccountToken: nil,    AWSElasticBlockStore: nil,    AzureFile: nil,    Capacity: nil,    Conditions: nil,    ConfigMapKeyRef: nil,    ConfigMap: &v1.ConfigMapVolumeSource{    ContainerPort: 3306,    ContainerPort: 33060,    ContainerPort: 33062,    ContainerPort: 3307,    ContainerPort: 3309,    ContainerPort: 4444,    ContainerPort: 4567,    ContainerPort: 4568,    ContainerPort: 6032,    ContainerPort: 6070,    ContainerPort: 8404,    Containers: []v1.Container{    CurrentReplicas: 0,    "d",    DataSource: nil,    DataSourceRef: nil,    DeletionGracePeriodSeconds: nil,    DeletionGracePeriodSeconds: nil,    DeletionTimestamp: nil,    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-haproxy"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-proxysql"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-pxc"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-log-collector"}, Optional: &true}}},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "LIVENESS_CHECK_TIMEOUT", Value: "5"}, {Name: "READINESS_CHECK_TIMEOUT", Value: "1"}},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "OPERATOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "proxyadmin"}}}, ...},    Env: {{Name: "PXC_SERVICE", Value: 
"some-name-pxc-unready"}, {Name: "MONITOR_HOST", Value: "%"}, {Name: "MYSQL_ROOT_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "root"}}}, {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, ...},    Env: []v1.EnvVar{    EphemeralContainers: nil,    "f",    FailureThreshold: 3,    FC: nil,    FieldPath: "metadata.name",    FieldPath: "metadata.namespace",    FieldRef: &v1.ObjectFieldSelector{    Finalizers: nil,    Finalizers: nil,    GitRepo: nil,    HostAliases: nil,    HostAliases: nil,    HostIP: "",    HostIPC: false,    Hostname: "",    HostPort: 0,    ImagePullPolicy: "Always",    ImagePullSecrets: nil,    InitContainers: []v1.Container{    InitialDelaySeconds: 300,    ISCSI: nil,    Items: nil,    Items: nil,    "kubectl.kubernetes.io/default-container": "haproxy",    "kubectl.kubernetes.io/default-container": "proxysql",    "kubectl.kubernetes.io/default-container": "pxc",    Labels: {"app.kubernetes.io/component": "haproxy", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},    Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},    Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},    Labels: nil,    "last-applied-secret": strings.Join({    Lifecycle: nil,    LivenessProbe: &v1.Probe{    LocalObjectReference: {Name: "auto-some-name-pxc"},    LocalObjectReference: {Name: "some-name-haproxy"},    LocalObjectReference: {Name: "some-name-pxc"},    ManagedFields: nil,    MinReadySeconds: 0,    Name: "auto-config",    {Name: "bin", VolumeSource: {EmptyDir: &{}}},    {Name: "CLUSTER_HASH", Value: "2229737"},    Name: "config",    {Name: "haproxy-auto", VolumeSource: {EmptyDir: &{}}},    Name: "haproxy-custom",    Name: "ist",    {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"},    {Name: "MONITOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "monitor"}}},    Name: "mysql",    Name: "mysql-admin",    Name: "mysql-init-file",    Name: "mysql-replicas",    Name: "mysql-users-secret-file",    Name: "mysqlx",    {Name: "OPERATOR_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}},    Name: "POD_NAME",    Name: "POD_NAMESPASE",    Name: "proxyadm",    Name: "proxy-protocol",    Name: "some-name-env-vars-haproxy",    Namespace: "users-12977",    Name: "ssl",    Name: "ssl-internal",    Name: "sst",    Name: "stats",    {Name: "tmp", VolumeSource: {EmptyDir: &{}}},    Name: "vault-keyring-secret",    Name: "write-set",    {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}},    NFS: nil,    NodeName: "",    NodeSelector: nil,    ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", 
"last-applied-secret": "09c805506dbfbb3fabd9ac2cd40c911c77d29097bd481e0b9f9bc5a827ea98d4", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}},    ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}},    ObjectMeta: {Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "pxc", "last-applied-secret": "9558af457b55d18e740a2371d028488931f295ea9aef7e45a12534889fc6bf8a", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}},    ObjectMeta: {Name: "datadir", Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: {Name: "proxydata", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: v1.ObjectMeta{    ObjectMeta: v1.ObjectMeta{    Optional: &false,    Optional: &true,    Optional: &true,    Ordinals: nil,    OS: nil,    Overhead: nil,    OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name", UID: "aadc62e6-c948-4152-807b-0d898400b4ea", ...}},    OwnerReferences: nil,    "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e",    "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e",    "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a",    "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a",    "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc",    "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc",    Ports: nil,    Ports: []v1.ContainerPort{    PreemptionPolicy: nil,    ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}},    Quobyte: nil,    ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...},    Replicas: &2,    Replicas: &3,    ResizePolicy: nil,    ResourceFieldRef: nil,    Resources: {},    Resources: {Limits: {s"cpu": {i: {...}, s: "1", Format: "DecimalSI"}, s"memory": {i: {...}, s: "2G", Format: "DecimalSI"}}, Requests: {s"cpu": {i: {...}, s: "100m", Format: "DecimalSI"}, s"memory": {i: {...}, Format: "DecimalSI"}}},    SecretName: "internal-some-name",    SecretName: "some-name-env-vars-haproxy",    SecretName: "some-name-mysql-init",    SecretName: "some-name-ssl",    SecretName: "some-name-ssl-internal",    SecretName: "some-name-vault",    Secret: &v1.SecretVolumeSource{    SecurityContext: nil,    Selector: &{MatchLabels: {"app.kubernetes.io/component": "haproxy", 
"app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    SelfLink: "",    ServiceAccountName: "default",    ServiceName: "some-name-haproxy",    ServiceName: "some-name-proxysql-unready",    ServiceName: "some-name-pxc",    SetHostnameAsFQDN: nil,    ShareProcessNamespace: nil,    Spec: v1.PersistentVolumeClaimSpec{    Spec: v1.PodSpec{    Spec: v1.StatefulSetSpec{    StartupProbe: nil,    Status: v1.PersistentVolumeClaimStatus{    Status: v1.StatefulSetStatus{    StorageClassName: nil,    Subdomain: "",    Subdomain: "",    SuccessThreshold: 1,    Template: v1.PodTemplateSpec{    TerminationGracePeriodSeconds: &30,    TerminationGracePeriodSeconds: &600,    TerminationGracePeriodSeconds: nil,    TimeoutSeconds: 5,    Tolerations: {{Key: "node.alpha.kubernetes.io/unreachable", Operator: "Exists", Effect: "NoExecute", TolerationSeconds: &6000}},    Tolerations: nil,    TypeMeta: {},    TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"},    UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}},   &v1.StatefulSet{    Value: "",    ValueFrom: &v1.EnvVarSource{    VolumeAttributesClassName: nil,    VolumeClaimTemplates: nil,    VolumeClaimTemplates: []v1.PersistentVolumeClaim{    VolumeDevices: nil,    VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...},    VolumeName: "",    VolumeSource: v1.VolumeSource{    Volumes: []v1.Volume{    VsphereVolume: nil,    WorkingDir: "", + grep -v NAMESPACE + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-12977 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.PfoCcUF22I ++ mktemp + local LAST_ERR=/tmp/tmp.VdqOqLx9rU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PfoCcUF22I perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-12977 namespace + cat /tmp/tmp.VdqOqLx9rU + rm /tmp/tmp.PfoCcUF22I /tmp/tmp.VdqOqLx9rU + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.ltkAjf2dsi ++ mktemp + local LAST_ERR=/tmp/tmp.KXA9hnstkf + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ltkAjf2dsi No resources found + cat /tmp/tmp.KXA9hnstkf + rm /tmp/tmp.ltkAjf2dsi /tmp/tmp.KXA9hnstkf + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.NPOfXHuVPI ++ mktemp + local 
LAST_ERR=/tmp/tmp.4sCxQgNEmM + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.NPOfXHuVPI No resources found + cat /tmp/tmp.4sCxQgNEmM + rm /tmp/tmp.NPOfXHuVPI /tmp/tmp.4sCxQgNEmM + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.ZWmrmVtfK0 ++ mktemp + local LAST_ERR=/tmp/tmp.bK7jNqNW5O + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ZWmrmVtfK0 validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.bK7jNqNW5O + rm /tmp/tmp.ZWmrmVtfK0 /tmp/tmp.bK7jNqNW5O + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-12977 + rm -rf /tmp/tmp.O4Ec8syMdJ + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.fBR7TzOShg + local LAST_OUT=/tmp/tmp.4Xfkcqmu9R + desc 'test passed' ++ mktemp + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- ++ mktemp + local LAST_ERR=/tmp/tmp.EwYNE6mldi + local LAST_ERR=/tmp/tmp.3rzbSB8F4V + local exit_status=0 + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace users-12977 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator
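Every kubectl invocation in this log runs through the kubectl_bin helper; its trace signature (two mktemp files for LAST_OUT and LAST_ERR, a three-attempt "seq 0 2" loop bracketed by set +e/set -e, cat of both files, rm, return) repeats on every call above. Below is a minimal sketch of that wrapper reconstructed from the trace alone; the back-off between attempts and the exact redirections are assumptions, and the authoritative definition lives in the e2e-tests function library, not here:

    kubectl_bin() {
        local LAST_OUT LAST_ERR
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        local exit_status=0
        for i in $(seq 0 2); do                # up to three attempts, as traced
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ $exit_status != 0 ]; then     # matches the traced '[' 0 '!=' 0 ']'
                sleep "$i"                     # assumed back-off, not visible in the trace
            else
                break
            fi
        done
        cat "$LAST_OUT"                        # surface captured stdout
        cat "$LAST_ERR" >&2                    # surface captured stderr (redirection assumed)
        rm "$LAST_OUT" "$LAST_ERR"
        return $exit_status
    }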
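The finalizer-stripping pipeline that create_infra runs at the start of this cleanup pass is worth a note: a PerconaXtraDBCluster resource carrying a finalizer would otherwise block "kubectl delete pxc --all" once the operator that services the finalizer is gone. The pipeline below is exactly what the trace shows, with explanatory comments added:

    # 'kubectl get pxc --all-namespaces' prints NAMESPACE and NAME as the first
    # two columns; 'grep -v NAMESPACE' drops the header row. xargs then feeds
    # each remaining line to sh, so $0 is the namespace and $1 the resource
    # name, and the merge patch empties metadata.finalizers so deletion cannot hang.
    kubectl get pxc --all-namespaces -o wide \
        | grep -v NAMESPACE \
        | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'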
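The percona.com/last-config-hash values compared in the StatefulSet diff earlier in this log are not opaque hashes but base64-encoded JSON of the StatefulSet spec. A small sketch for inspecting one by hand, assuming jq is available and reusing the object and namespace names from this run (while the cluster still exists):

    kubectl get sts some-name-pxc -n users-12977 -o json \
        | jq -r '.metadata.annotations["percona.com/last-config-hash"]' \
        | base64 -d \
        | jq '.template.metadata.annotations'   # e.g. shows "last-applied-secret"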