Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/logs/users-5-7.log
Warning: version difference between client (1.35) and server (1.31) exceeds the supported minor version skew of +/-1
Warning: version difference between client (1.35) and server (1.31) exceeds the supported minor version skew of +/-1
+ create_infra users-31794
+ local ns=users-31794
+ '[' -n pxc-operator ']'
+ kubectl get pxc --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl patch pxc -n users-25423 some-name --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.w0eFVq44ud
++ mktemp
+ local LAST_ERR=/tmp/tmp.yRS4B1AR6F
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.w0eFVq44ud
perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-25423 namespace
+ cat /tmp/tmp.yRS4B1AR6F
+ rm /tmp/tmp.w0eFVq44ud /tmp/tmp.yRS4B1AR6F
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.3dgoZgc5YM
++ mktemp
+ local LAST_ERR=/tmp/tmp.2R6liA1HRP
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.3dgoZgc5YM
No resources found
+ cat /tmp/tmp.2R6liA1HRP
+ rm /tmp/tmp.3dgoZgc5YM /tmp/tmp.2R6liA1HRP
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.GTCMzxPDuy
++ mktemp
+ local LAST_ERR=/tmp/tmp.PLsIb62aWG
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.GTCMzxPDuy
No resources found
+ cat /tmp/tmp.PLsIb62aWG
+ rm /tmp/tmp.GTCMzxPDuy /tmp/tmp.PLsIb62aWG
+ return 0
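Every kubectl call above goes through the test suite's kubectl_bin retry wrapper. A minimal sketch of that wrapper, reconstructed from the trace (the real helper lives in e2e-tests/functions and may differ in details):

    # Sketch of kubectl_bin as inferred from the trace: capture stdout/stderr
    # to temp files and retry the command up to three times.
    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do      # up to three attempts
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" != 0 ]; then
                sleep 0              # the trace shows no back-off between attempts
                continue
            fi
            break
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2
        rm "$LAST_OUT" "$LAST_ERR"
        return $exit_status
    }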
+ create_namespace pxc-operator
+ local namespace=pxc-operator
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ tail -n1
++ sed s/NAMESPACE//
++ awk '-F ' '{print $2}'
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ awk '{print $1}'
++ kubectl get MutatingWebhookConfiguration
++ grep chaos-mesh
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep validate-auth
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl api-resources
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get crd
++ grep chaos-mesh.org
++ awk '{print $1}'
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ grep chaos-mesh
++ kubectl get clusterrole
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ kubectl_bin get ns
+ awk '{print$1}'
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces pxc-operator'
+ xargs kubectl delete ns
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace pxc-operator
++ mktemp
+ grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
+ local LAST_OUT=/tmp/tmp.1eO13ZbnBN
++ mktemp
++ mktemp
+ local LAST_ERR=/tmp/tmp.M8uP6dSy73
+ local exit_status=0
++ seq 0 2
+ local LAST_OUT=/tmp/tmp.Mzm3Qqb0TN
++ mktemp
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace pxc-operator
+ local LAST_ERR=/tmp/tmp.tvWzlfZI4C
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get ns
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.Mzm3Qqb0TN
+ cat /tmp/tmp.tvWzlfZI4C
+ rm /tmp/tmp.Mzm3Qqb0TN /tmp/tmp.tvWzlfZI4C
+ return 0
namespace "users-25423" deleted
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.1eO13ZbnBN
namespace "pxc-operator" deleted
+ cat /tmp/tmp.M8uP6dSy73
+ rm /tmp/tmp.1eO13ZbnBN /tmp/tmp.M8uP6dSy73
+ return 0
+ wait_for_delete namespace/pxc-operator
+ local res=namespace/pxc-operator
+ echo -n 'waiting for namespace/pxc-operator to be deleted'
waiting for namespace/pxc-operator to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "pxc-operator" not found
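The interleaved trace above is two kubectl_bin invocations running as one pipeline: the namespace listing is filtered and fed to xargs while the known test namespace is deleted explicitly. A condensed sketch of that cleanup, assuming the same filter list as the trace:

    # Sketch of the namespace cleanup above: list namespaces, drop system and
    # protected ones, delete whatever remains, then delete pxc-operator itself.
    kubectl get ns \
        | grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' \
        | awk '{print$1}' \
        | xargs kubectl delete ns
    kubectl delete namespace pxc-operator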
+ desc 'create namespace pxc-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.IiTbb5jtAn
++ mktemp
+ local LAST_ERR=/tmp/tmp.SOBotZrCxG
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.IiTbb5jtAn
namespace/pxc-operator created
+ cat /tmp/tmp.SOBotZrCxG
+ rm /tmp/tmp.IiTbb5jtAn /tmp/tmp.SOBotZrCxG
+ return 0
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.ir3oSnmTCW
+++ mktemp
++ local LAST_ERR=/tmp/tmp.zhWd3Q0aMq
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.ir3oSnmTCW
++ cat /tmp/tmp.zhWd3Q0aMq
++ rm /tmp/tmp.ir3oSnmTCW /tmp/tmp.zhWd3Q0aMq
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2325-d401e0a9-8-cluster3 --namespace=pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.4oKIX6Ukbt
++ mktemp
+ local LAST_ERR=/tmp/tmp.ZS5WZANpU5
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2325-d401e0a9-8-cluster3 --namespace=pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.4oKIX6Ukbt
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2325-d401e0a9-8-cluster3" modified.
+ cat /tmp/tmp.ZS5WZANpU5
+ rm /tmp/tmp.4oKIX6Ukbt /tmp/tmp.ZS5WZANpU5
+ return 0
+ deploy_operator
+ desc 'start PXC operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
start PXC operator
-----------------------------------------------------------------------------------
+ kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/deploy/crd.yaml
++ mktemp
+ local LAST_OUT=/tmp/tmp.44iOF4g37l
++ mktemp
+ local LAST_ERR=/tmp/tmp.77otJn5uC7
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/deploy/crd.yaml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.44iOF4g37l
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied
+ cat /tmp/tmp.77otJn5uC7
+ rm /tmp/tmp.44iOF4g37l /tmp/tmp.77otJn5uC7
+ return 0
+ '[' -n pxc-operator ']'
+ apply_rbac cw-rbac
+ local operator_namespace=pxc-operator
+ local rbac=cw-rbac
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/deploy/cw-rbac.yaml
+ sed -e 's^namespace: .*^namespace: pxc-operator^'
+ kubectl_bin apply -f -
++ mktemp
+ local LAST_OUT=/tmp/tmp.jspaOPFQPK
++ mktemp
+ local LAST_ERR=/tmp/tmp.7DaY7RYiZq
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.jspaOPFQPK
clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged
serviceaccount/percona-xtradb-cluster-operator created
clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged
+ cat /tmp/tmp.7DaY7RYiZq
+ rm /tmp/tmp.jspaOPFQPK /tmp/tmp.7DaY7RYiZq
+ return 0
+ yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' -
+ kubectl_bin apply -f -
+ yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "PXCO_FEATURE_GATES").value) = ""' -
+ sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2325-d401e0a9^'
++ mktemp
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/deploy/cw-operator.yaml
+ sed -e 's^failureThreshold: .*^failureThreshold: 10^'
+ local LAST_OUT=/tmp/tmp.M5FVGHRPUI
+ yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' -
++ mktemp
+ local LAST_ERR=/tmp/tmp.VUz6Y0hiNu
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.M5FVGHRPUI
deployment.apps/percona-xtradb-cluster-operator created
service/percona-xtradb-cluster-operator created
+ cat /tmp/tmp.VUz6Y0hiNu
+ rm /tmp/tmp.M5FVGHRPUI /tmp/tmp.VUz6Y0hiNu
+ return 0
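The operator manifest is customized on the fly before being applied: sed rewrites the image and probe threshold, and yq patches env vars on the Deployment. A condensed sketch of that pipeline using the expressions from the trace (the PXCO_FEATURE_GATES edit follows the same pattern and is omitted here):

    # Sketch of the cw-operator.yaml customization traced above.
    cat deploy/cw-operator.yaml \
        | sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2325-d401e0a9^' \
        | sed -e 's^failureThreshold: .*^failureThreshold: 10^' \
        | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - \
        | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - \
        | kubectl apply -f -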
+ sleep 10
+ kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s
++ mktemp
+ local LAST_OUT=/tmp/tmp.zQXXJTarmI
++ mktemp
+ local LAST_ERR=/tmp/tmp.Z8ruF5FF2D
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.zQXXJTarmI
pod/percona-xtradb-cluster-operator-6dc5fb9844-zsx8l condition met
+ cat /tmp/tmp.Z8ruF5FF2D
+ rm /tmp/tmp.zQXXJTarmI /tmp/tmp.Z8ruF5FF2D
+ return 0
++ get_operator_pod
++ local label_prefix=app.kubernetes.io/
+++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator
+++ grep -c percona-xtradb-cluster-operator
++ local check_label=1
++ [[ 1 -eq 0 ]]
++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator
++ jq -r '.items[] | select(.metadata.deletionTimestamp == null) | .metadata.name'
++ head -1
+++ mktemp
++ local LAST_OUT=/tmp/tmp.3YsROkdZ5o
+++ mktemp
++ local LAST_ERR=/tmp/tmp.P1Q0lyxqEx
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.3YsROkdZ5o
++ cat /tmp/tmp.P1Q0lyxqEx
++ rm /tmp/tmp.3YsROkdZ5o /tmp/tmp.P1Q0lyxqEx
++ return 0
+ wait_pod percona-xtradb-cluster-operator-6dc5fb9844-zsx8l 480 pxc-operator
+ local pod=percona-xtradb-cluster-operator-6dc5fb9844-zsx8l
+ local max_retry=480
+ local ns=pxc-operator
++ echo percona-xtradb-cluster-operator-6dc5fb9844-zsx8l
++ grep -E '^(pxc|proxysql)$'
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
+ local container=
+ set +o xtrace
pod/percona-xtradb-cluster-operator-6dc5fb9844-zsx8l condition met
waiting for pod/percona-xtradb-cluster-operator-6dc5fb9844-zsx8l to become Ready.Ok
+ sleep 3
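get_operator_pod, as traced above, picks a Running operator pod that is not already being deleted. A minimal sketch of that selection:

    # Sketch of get_operator_pod: list Running operator pods as JSON and keep
    # the first one without a deletionTimestamp.
    kubectl get pods \
        --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator \
        --field-selector=status.phase=Running -o json -n pxc-operator \
        | jq -r '.items[] | select(.metadata.deletionTimestamp == null) | .metadata.name' \
        | head -1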
+ create_namespace users-31794
+ local namespace=users-31794
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ tail -n1
++ awk '-F ' '{print $2}'
++ sed s/NAMESPACE//
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ kubectl get MutatingWebhookConfiguration
++ awk '{print $1}'
++ grep chaos-mesh
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ awk '{print $1}'
++ grep validate-auth
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl api-resources
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get crd
++ grep chaos-mesh.org
++ awk '{print $1}'
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrole
++ awk '{print $1}'
++ grep chaos-mesh
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ kubectl_bin get ns
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces users-31794'
+ xargs kubectl delete ns
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces users-31794
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace users-31794
+ awk '{print$1}'
++ mktemp
+ grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
++ mktemp
+ local LAST_OUT=/tmp/tmp.upz9frJDWs
+ local LAST_OUT=/tmp/tmp.5O750aHff0
++ mktemp
+ local LAST_ERR=/tmp/tmp.V386HXZfEE
+ local exit_status=0
++ mktemp
+ local LAST_ERR=/tmp/tmp.RJuqUkAFKK
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace users-31794
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get ns
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace users-31794
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.5O750aHff0
+ cat /tmp/tmp.RJuqUkAFKK
+ rm /tmp/tmp.5O750aHff0 /tmp/tmp.RJuqUkAFKK
+ return 0
error: resource(s) were provided, but no name was specified
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace users-31794
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ cat /tmp/tmp.upz9frJDWs
+ cat /tmp/tmp.V386HXZfEE
Error from server (NotFound): namespaces "users-31794" not found
+ rm /tmp/tmp.upz9frJDWs /tmp/tmp.V386HXZfEE
+ return 1
+ :
+ wait_for_delete namespace/users-31794
+ local res=namespace/users-31794
+ echo -n 'waiting for namespace/users-31794 to be deleted'
waiting for namespace/users-31794 to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "users-31794" not found
+ desc 'create namespace users-31794'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace users-31794
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace users-31794
++ mktemp
+ local LAST_OUT=/tmp/tmp.cHn1FfLvGh
++ mktemp
+ local LAST_ERR=/tmp/tmp.uqxgqSRcn4
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace users-31794
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.cHn1FfLvGh
namespace/users-31794 created
+ cat /tmp/tmp.uqxgqSRcn4
+ rm /tmp/tmp.cHn1FfLvGh /tmp/tmp.uqxgqSRcn4
+ return 0
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.d5XwIbrEVN
+++ mktemp
++ local LAST_ERR=/tmp/tmp.hhfGfhC48z
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.d5XwIbrEVN
++ cat /tmp/tmp.hhfGfhC48z
++ rm /tmp/tmp.d5XwIbrEVN /tmp/tmp.hhfGfhC48z
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2325-d401e0a9-8-cluster3 --namespace=users-31794
++ mktemp
+ local LAST_OUT=/tmp/tmp.1Af4nDdYDO
++ mktemp
+ local LAST_ERR=/tmp/tmp.mIgKmaXQsA
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2325-d401e0a9-8-cluster3 --namespace=users-31794
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.1Af4nDdYDO
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2325-d401e0a9-8-cluster3" modified.
+ cat /tmp/tmp.mIgKmaXQsA
+ rm /tmp/tmp.1Af4nDdYDO /tmp/tmp.mIgKmaXQsA
+ return 0
+ apply_secrets
+ desc 'create secrets for cloud storages'
+ set +o xtrace
-----------------------------------------------------------------------------------
create secrets for cloud storages
-----------------------------------------------------------------------------------
+ '[' -z '' ']'
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/conf/cloud-secret.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.aHnUTd8fFM
++ mktemp
+ local LAST_ERR=/tmp/tmp.OhulNpeoVt
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/conf/cloud-secret.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.aHnUTd8fFM
secret/minio-secret created
secret/aws-s3-secret created
secret/gcp-cs-secret created
secret/azure-secret created
+ cat /tmp/tmp.OhulNpeoVt
+ rm /tmp/tmp.aHnUTd8fFM /tmp/tmp.OhulNpeoVt
+ return 0
+ desc 'create PXC cluster with 1-password secret'
+ set +o xtrace
-----------------------------------------------------------------------------------
create PXC cluster with 1-password secret
-----------------------------------------------------------------------------------
+ newpass=test-password
++ echo -n test-password
++ base64
+ newpassencrypted=dGVzdC1wYXNzd29yZA==
+ cluster=some-name
+ spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/conf/some-name.yml '' '' /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/conf/secrets_one_pass.yml
+ local cluster=some-name
+ local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/conf/some-name.yml
+ local size=3
+ local sleep=10
+ local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/conf/secrets_one_pass.yml
+ local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/conf/client.yml
+ local port=3306
+ desc 'create first PXC cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
create first PXC cluster
-----------------------------------------------------------------------------------
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/conf/secrets_one_pass.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.1qLIYVV0LT
++ mktemp
+ local LAST_ERR=/tmp/tmp.a2XCmR7V4s
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/conf/secrets_one_pass.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.1qLIYVV0LT
secret/my-cluster-secrets created
secret/some-name-ssl created
secret/some-name-ssl-internal created
+ cat /tmp/tmp.a2XCmR7V4s
+ rm /tmp/tmp.1qLIYVV0LT /tmp/tmp.a2XCmR7V4s
+ return 0
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/conf/client.yml
+ local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/conf/client.yml
+ local pvc_name=
+ '[' -z '' ']'
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/conf/client.yml ''
+ local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/conf/client.yml
+ local pvc_name=
+ kubectl_bin apply -f -
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/conf/client.yml
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#'
++ mktemp
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
+ local LAST_OUT=/tmp/tmp.9jZiEMwbli
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#'
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2325-d401e0a9#'
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#'
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
++ mktemp
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#'
+ /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #'
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-31794~
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ local LAST_ERR=/tmp/tmp.1R8lu7PsVf
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.9jZiEMwbli
deployment.apps/pxc-client created
+ cat /tmp/tmp.1R8lu7PsVf
+ rm /tmp/tmp.9jZiEMwbli /tmp/tmp.1R8lu7PsVf
+ return 0
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
+ [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]]
+ [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]]
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/conf/some-name.yml
+ local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/conf/some-name.yml
+ local pvc_name=
+ '[' -z '' ']'
+ kubectl_bin apply -f -
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/conf/some-name.yml ''
+ local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/conf/some-name.yml
+ local pvc_name=
++ mktemp
+ local LAST_OUT=/tmp/tmp.LuXhIOA7Gf
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#'
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
++ mktemp
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#'
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-31794~
+ local LAST_ERR=/tmp/tmp.79NGogMGIH
+ local exit_status=0
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/conf/some-name.yml
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#'
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#'
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2325-d401e0a9#'
+ /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #'
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.LuXhIOA7Gf
perconaxtradbcluster.pxc.percona.com/some-name created
+ cat /tmp/tmp.79NGogMGIH
+ rm /tmp/tmp.LuXhIOA7Gf /tmp/tmp.79NGogMGIH
+ return 0
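The cat_config helper traced above pipes one CR template through a chain of sed substitutions so the same manifest serves every image under test. A condensed sketch with a subset of the traced substitutions (the pmm, haproxy, backup and logcollector rewrites follow the same pattern):

    # Sketch of cat_config's image rewriting before the manifest is applied.
    cat e2e-tests/conf/some-name.yml \
        | sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
        | sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' \
        | sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2325-d401e0a9#' \
        | sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' \
        | sed -e 's~minio-service.#namespace~minio-service.users-31794~' \
        | kubectl apply -f -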
+ desc 'check if all 3 Pods started'
+ set +o xtrace
-----------------------------------------------------------------------------------
check if all 3 Pods started
-----------------------------------------------------------------------------------
++ get_proxy some-name
++ local target_cluster=some-name
+++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}'
++++ mktemp
+++ local LAST_OUT=/tmp/tmp.btCbxONRes
++++ mktemp
+++ local LAST_ERR=/tmp/tmp.YZH6MwAbXg
+++ local exit_status=0
++++ seq 0 2
+++ for i in '$(seq 0 2)'
+++ set +e
+++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}'
+++ exit_status=0
+++ set -e
+++ '[' 0 '!=' 0 ']'
+++ break
+++ cat /tmp/tmp.btCbxONRes
+++ cat /tmp/tmp.YZH6MwAbXg
+++ rm /tmp/tmp.btCbxONRes /tmp/tmp.YZH6MwAbXg
+++ return 0
++ [[ '' == \t\r\u\e ]]
+++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}'
++++ mktemp
+++ local LAST_OUT=/tmp/tmp.fmXEBV319C
++++ mktemp
+++ local LAST_ERR=/tmp/tmp.DRYdyjSNAS
+++ local exit_status=0
++++ seq 0 2
+++ for i in '$(seq 0 2)'
+++ set +e
+++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}'
+++ exit_status=0
+++ set -e
+++ '[' 0 '!=' 0 ']'
+++ break
+++ cat /tmp/tmp.fmXEBV319C
+++ cat /tmp/tmp.DRYdyjSNAS
+++ rm /tmp/tmp.fmXEBV319C /tmp/tmp.DRYdyjSNAS
+++ return 0
++ [[ true == \t\r\u\e ]]
++ echo some-name-proxysql
++ return
+ local proxy=some-name-proxysql
+ kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-31794
++ mktemp
+ local LAST_OUT=/tmp/tmp.KKDoqPBJ2j
++ mktemp
+ local LAST_ERR=/tmp/tmp.6zhhDch3Ob
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-31794
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-31794
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-31794
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ cat /tmp/tmp.KKDoqPBJ2j
+ cat /tmp/tmp.6zhhDch3Ob
error: no matching resources found
+ rm /tmp/tmp.KKDoqPBJ2j /tmp/tmp.6zhhDch3Ob
+ return 1
+ true
+ wait_for_running some-name-proxysql 1
+ local name=some-name-proxysql
+ let last_pod=0
+ :
+ local max_retry=480
+ desc 'wait for running cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
++ seq 0 0
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-proxysql-0 480
+ local pod=some-name-proxysql-0
+ local max_retry=480
+ local ns=
++ echo some-name-proxysql-0
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=proxysql
+ set +o xtrace
pod/some-name-proxysql-0 condition met
waiting for pod/some-name-proxysql-0 to become Ready.Ok
+ wait_for_running some-name-pxc 3
+ local name=some-name-pxc
+ let last_pod=2
+ local max_retry=480
+ desc 'wait for running cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
++ seq 0 2
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-pxc-0 480
+ local pod=some-name-pxc-0
+ local max_retry=480
+ local ns=
++ echo some-name-pxc-0
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=pxc
+ set +o xtrace
pod/some-name-pxc-0 condition met
waiting for pod/some-name-pxc-0 to become Ready.Ok
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-pxc-1 480
+ local pod=some-name-pxc-1
+ local max_retry=480
+ local ns=
++ echo some-name-pxc-1
++ grep -E '^(pxc|proxysql)$'
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
+ local container=pxc
+ set +o xtrace
pod/some-name-pxc-1 condition met
waiting for pod/some-name-pxc-1 to become Ready.Ok
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-pxc-2 480
+ local pod=some-name-pxc-2
+ local max_retry=480
+ local ns=
++ echo some-name-pxc-2
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=pxc
+ set +o xtrace
pod/some-name-pxc-2 condition met
waiting for pod/some-name-pxc-2 to become Ready.Ok
+ sleep 10
++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}'
+ local secret_name=my-cluster-secrets
++ getSecretData my-cluster-secrets root
++ local secretName=my-cluster-secrets
++ local dataKey=root
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.IM8JW2iLTa
+++ mktemp
++ local LAST_ERR=/tmp/tmp.HSv0Ufptgr
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.IM8JW2iLTa
++ cat /tmp/tmp.HSv0Ufptgr
++ rm /tmp/tmp.IM8JW2iLTa /tmp/tmp.HSv0Ufptgr
++ return 0
+ local 'root_pass=?.ns@-)6,6U8ho61Ba+'
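getSecretData, as traced above, reads one key out of a secret with a go-template and base64-decodes it. A minimal sketch:

    # Sketch of getSecretData: extract and decode a single secret key.
    getSecretData() {
        local secretName=$1 dataKey=$2
        kubectl get "secrets/$secretName" --template="{{.data.$dataKey}}" | base64 --decode
    }
    root_pass=$(getSecretData my-cluster-secrets root)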
+ desc 'write data'
+ set +o xtrace
-----------------------------------------------------------------------------------
write data
-----------------------------------------------------------------------------------
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
++ is_keyring_plugin_in_use some-name
++ local cluster=some-name
++ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]]
++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf'
++ grep -E -o 'early-plugin-load=keyring_\w+.so'
Unable to use a TTY - input is not a terminal or the right kind of file
++ return 1
+ [[ -n '' ]]
+ run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''?.ns@-)6,6U8ho61Ba+'\'' -P3306'
+ local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;'
+ local 'uri=-h some-name-proxysql -uroot -p'\''?.ns@-)6,6U8ho61Ba+'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.dV479fswg8
+++ mktemp
++ local LAST_ERR=/tmp/tmp.aNUD0ttU0U
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.dV479fswg8
++ cat /tmp/tmp.aNUD0ttU0U
++ rm /tmp/tmp.dV479fswg8 /tmp/tmp.aNUD0ttU0U
++ return 0
+ client_pod=pxc-client-545786ccbb-gfmwf
+ wait_pod pxc-client-545786ccbb-gfmwf
+ local pod=pxc-client-545786ccbb-gfmwf
+ local max_retry=480
+ local ns=
++ echo pxc-client-545786ccbb-gfmwf
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-545786ccbb-gfmwf condition met
waiting for pod/pxc-client-545786ccbb-gfmwf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''?.ns@-)6,6U8ho61Ba+'\'' -P3306'
+ local 'command=INSERT myApp.myApp (id) VALUES (100500)'
+ local 'uri=-h some-name-proxysql -uroot -p'\''?.ns@-)6,6U8ho61Ba+'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.nX7so4HhjD
+++ mktemp
++ local LAST_ERR=/tmp/tmp.RtB4wqOJ63
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.nX7so4HhjD
++ cat /tmp/tmp.RtB4wqOJ63
++ rm /tmp/tmp.nX7so4HhjD /tmp/tmp.RtB4wqOJ63
++ return 0
+ client_pod=pxc-client-545786ccbb-gfmwf
+ wait_pod pxc-client-545786ccbb-gfmwf
+ local pod=pxc-client-545786ccbb-gfmwf
+ local max_retry=480
+ local ns=
++ echo pxc-client-545786ccbb-gfmwf
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-545786ccbb-gfmwf condition met
waiting for pod/pxc-client-545786ccbb-gfmwf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ sleep 30
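The is_keyring_plugin_in_use probe traced above greps the node config inside the pxc container for an early keyring plugin load. A minimal sketch (dropping -it to avoid the TTY warning seen in the log):

    # Sketch of the keyring probe: non-empty output means a keyring plugin
    # is loaded early on the node.
    kubectl exec some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' \
        | grep -E -o 'early-plugin-load=keyring_\w+.so'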
++ seq 0 2
+ for i in '$(seq 0 $((size - 1)))'
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''?.ns@-)6,6U8ho61Ba+'\'' -P3306'
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''?.ns@-)6,6U8ho61Ba+'\'' -P3306'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-1-57.sql ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''?.ns@-)6,6U8ho61Ba+'\'' -P3306'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''?.ns@-)6,6U8ho61Ba+'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.XLTFOn8QeF
+++ mktemp
++ local LAST_ERR=/tmp/tmp.EcgztM3nbs
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.XLTFOn8QeF
++ cat /tmp/tmp.EcgztM3nbs
++ rm /tmp/tmp.XLTFOn8QeF /tmp/tmp.EcgztM3nbs
++ return 0
+ client_pod=pxc-client-545786ccbb-gfmwf
+ wait_pod pxc-client-545786ccbb-gfmwf
+ local pod=pxc-client-545786ccbb-gfmwf
+ local max_retry=480
+ local ns=
++ echo pxc-client-545786ccbb-gfmwf
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-545786ccbb-gfmwf condition met
waiting for pod/pxc-client-545786ccbb-gfmwf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ [[ ! -s /tmp/tmp.dqV03Eaoyh/select-1.sql ]]
++ grep 'Unknown MySQL server host' /tmp/tmp.dqV03Eaoyh/select-1.sql
+ [[ -n '' ]]
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-1.sql /tmp/tmp.dqV03Eaoyh/select-1.sql
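compare_mysql_cmd, as traced above, runs a query through the client pod, saves the result to the test's scratch dir and diffs it against an expected .sql file. The run_mysql internals are hidden behind 'set +o xtrace', so the kubectl exec line below is an assumption:

    # Sketch of the compare flow; /tmp/tmp.dqV03Eaoyh is the scratch dir
    # created earlier in the test run.
    tmp_dir=/tmp/tmp.dqV03Eaoyh
    kubectl exec "$client_pod" -- mysql -h some-name-pxc-0.some-name-pxc \
        -uroot -p"$root_pass" -NBe 'SELECT * from myApp.myApp;' \
        >"$tmp_dir/select-1.sql"
    [[ -s $tmp_dir/select-1.sql ]]                                   # result must be non-empty
    ! grep -q 'Unknown MySQL server host' "$tmp_dir/select-1.sql"    # and must not be a DNS error
    diff -u e2e-tests/users/compare/select-1.sql "$tmp_dir/select-1.sql"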
+ for i in '$(seq 0 $((size - 1)))'
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''?.ns@-)6,6U8ho61Ba+'\'' -P3306'
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''?.ns@-)6,6U8ho61Ba+'\'' -P3306'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-1-57.sql ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''?.ns@-)6,6U8ho61Ba+'\'' -P3306'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''?.ns@-)6,6U8ho61Ba+'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.wwmOOiTPmV
+++ mktemp
++ local LAST_ERR=/tmp/tmp.g6TtNrWNo5
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.wwmOOiTPmV
++ cat /tmp/tmp.g6TtNrWNo5
++ rm /tmp/tmp.wwmOOiTPmV /tmp/tmp.g6TtNrWNo5
++ return 0
+ client_pod=pxc-client-545786ccbb-gfmwf
+ wait_pod pxc-client-545786ccbb-gfmwf
+ local pod=pxc-client-545786ccbb-gfmwf
+ local max_retry=480
+ local ns=
++ echo pxc-client-545786ccbb-gfmwf
++ grep -E '^(pxc|proxysql)$'
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
+ local container=
+ set +o xtrace
pod/pxc-client-545786ccbb-gfmwf condition met
waiting for pod/pxc-client-545786ccbb-gfmwf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ [[ ! -s /tmp/tmp.dqV03Eaoyh/select-1.sql ]]
++ grep 'Unknown MySQL server host' /tmp/tmp.dqV03Eaoyh/select-1.sql
+ [[ -n '' ]]
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-1.sql /tmp/tmp.dqV03Eaoyh/select-1.sql
+ for i in '$(seq 0 $((size - 1)))'
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''?.ns@-)6,6U8ho61Ba+'\'' -P3306'
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''?.ns@-)6,6U8ho61Ba+'\'' -P3306'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-1-57.sql ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''?.ns@-)6,6U8ho61Ba+'\'' -P3306'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''?.ns@-)6,6U8ho61Ba+'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.LQp4MulnvL
+++ mktemp
++ local LAST_ERR=/tmp/tmp.XMmBYmE5EP
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.LQp4MulnvL
++ cat /tmp/tmp.XMmBYmE5EP
++ rm /tmp/tmp.LQp4MulnvL /tmp/tmp.XMmBYmE5EP
++ return 0
+ client_pod=pxc-client-545786ccbb-gfmwf
+ wait_pod pxc-client-545786ccbb-gfmwf
+ local pod=pxc-client-545786ccbb-gfmwf
+ local max_retry=480
+ local ns=
++ echo pxc-client-545786ccbb-gfmwf
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-545786ccbb-gfmwf condition met
waiting for pod/pxc-client-545786ccbb-gfmwf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ [[ ! -s /tmp/tmp.dqV03Eaoyh/select-1.sql ]]
++ grep 'Unknown MySQL server host' /tmp/tmp.dqV03Eaoyh/select-1.sql
+ [[ -n '' ]]
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-1.sql /tmp/tmp.dqV03Eaoyh/select-1.sql
+ is_keyring_plugin_in_use some-name
+ local cluster=some-name
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]]
+ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf'
+ grep -E -o 'early-plugin-load=keyring_\w+.so'
Unable to use a TTY - input is not a terminal or the right kind of file
+ return 1
++ kubectl exec -it some-name-proxysql-0 -- sh -c 'proxysql --version 2>/dev/null'
++ awk '{print $3}'
++ cut -d. -f1
Unable to use a TTY - input is not a terminal or the right kind of file
+ PROXYSQL_VER=2
+ tables_cmp_file=select-2-proxysql3
+ [[ 2 == 2 ]]
+ tables_cmp_file=select-2
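The ProxySQL version gate traced above derives the major version from the binary inside the pod and picks the matching expected-results file. A minimal sketch:

    # Sketch of the version gate: default to the ProxySQL 3 results file,
    # fall back to select-2 when the binary reports major version 2.
    PROXYSQL_VER=$(kubectl exec some-name-proxysql-0 -- sh -c 'proxysql --version 2>/dev/null' \
        | awk '{print $3}' | cut -d. -f1)
    tables_cmp_file=select-2-proxysql3
    if [[ $PROXYSQL_VER == 2 ]]; then
        tables_cmp_file=select-2
    fi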
+ desc 'test missing passwords were created and present in internal secrets'
+ set +o xtrace
-----------------------------------------------------------------------------------
test missing passwords were created and present in internal secrets
-----------------------------------------------------------------------------------
+ empty_pwds=()
+ wrong_pwds=()
+ for user in root xtrabackup monitor proxyadmin operator replication
+ echo 'Checking root'
Checking root
++ getSecretData my-cluster-secrets root
++ local secretName=my-cluster-secrets
++ local dataKey=root
++ base64 --decode
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.xnRKfIAdH5
+++ mktemp
++ local LAST_ERR=/tmp/tmp.mYBLoQbr87
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.xnRKfIAdH5
++ cat /tmp/tmp.mYBLoQbr87
++ rm /tmp/tmp.xnRKfIAdH5 /tmp/tmp.mYBLoQbr87
++ return 0
+ secret_pass='?.ns@-)6,6U8ho61Ba+'
++ getSecretData internal-some-name root
++ local secretName=internal-some-name
++ local dataKey=root
++ kubectl_bin get secrets/internal-some-name '--template={{.data.root}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.Hr0O1Q81mc
+++ mktemp
++ local LAST_ERR=/tmp/tmp.Xtud98QzKj
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/internal-some-name '--template={{.data.root}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.Hr0O1Q81mc
++ cat /tmp/tmp.Xtud98QzKj
++ rm /tmp/tmp.Hr0O1Q81mc /tmp/tmp.Xtud98QzKj
++ return 0
+ int_secret_pass='?.ns@-)6,6U8ho61Ba+'
+ [[ -z ?.ns@-)6,6U8ho61Ba+ ]]
+ [[ ?.ns@-)6,6U8ho61Ba+ != \?\.\n\s\@\-\)\6\,\6\U\8\h\o\6\1\B\a\+ ]]
+ [[ root != \p\r\o\x\y\a\d\m\i\n ]]
+ [[ '' =~ root ]]
+ [[ '' =~ root ]]
+ echo 'Running compare for root'
Running compare for root
+ compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''?.ns@-)6,6U8ho61Ba+'\'''
+ local command_id=select-4
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -uroot -p'\''?.ns@-)6,6U8ho61Ba+'\'''
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4-57.sql ]]
+ run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''?.ns@-)6,6U8ho61Ba+'\'''
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -uroot -p'\''?.ns@-)6,6U8ho61Ba+'\'''
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.gJb0ypBo7H
+++ mktemp
++ local LAST_ERR=/tmp/tmp.gSq0HbhUgA
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.gJb0ypBo7H
++ cat /tmp/tmp.gSq0HbhUgA
++ rm /tmp/tmp.gJb0ypBo7H /tmp/tmp.gSq0HbhUgA
++ return 0
+ client_pod=pxc-client-545786ccbb-gfmwf
+ wait_pod pxc-client-545786ccbb-gfmwf
+ local pod=pxc-client-545786ccbb-gfmwf
+ local max_retry=480
+ local ns=
++ echo pxc-client-545786ccbb-gfmwf
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-545786ccbb-gfmwf condition met
waiting for pod/pxc-client-545786ccbb-gfmwf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ [[ ! -s /tmp/tmp.dqV03Eaoyh/select-4.sql ]]
++ grep 'Unknown MySQL server host' /tmp/tmp.dqV03Eaoyh/select-4.sql
+ [[ -n '' ]]
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4.sql /tmp/tmp.dqV03Eaoyh/select-4.sql
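Each user check above compares the password in the user-facing secret against the copy the operator keeps in internal-some-name, then verifies a login works. A condensed sketch of the secret-consistency part of that loop (the proxyadmin login is checked differently, via the ProxySQL admin port):

    # Sketch of the per-user consistency check traced above.
    for user in root xtrabackup monitor proxyadmin operator replication; do
        secret_pass=$(kubectl get secrets/my-cluster-secrets --template="{{.data.$user}}" | base64 --decode)
        int_secret_pass=$(kubectl get secrets/internal-some-name --template="{{.data.$user}}" | base64 --decode)
        [[ -z $secret_pass ]] && empty_pwds+=("$user")                   # password missing
        [[ $secret_pass != "$int_secret_pass" ]] && wrong_pwds+=("$user") # copies diverged
    done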
+ for user in root xtrabackup monitor proxyadmin operator replication
+ echo 'Checking xtrabackup'
Checking xtrabackup
++ getSecretData my-cluster-secrets xtrabackup
++ local secretName=my-cluster-secrets
++ local dataKey=xtrabackup
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.90msd1TDKC
+++ mktemp
++ local LAST_ERR=/tmp/tmp.BrZbk05oAC
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.90msd1TDKC
++ cat /tmp/tmp.BrZbk05oAC
++ rm /tmp/tmp.90msd1TDKC /tmp/tmp.BrZbk05oAC
++ return 0
+ secret_pass='8XR&t&7-~U<6K^OITv'
++ getSecretData internal-some-name xtrabackup
++ local secretName=internal-some-name
++ local dataKey=xtrabackup
++ kubectl_bin get secrets/internal-some-name '--template={{.data.xtrabackup}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.2w9yQ0EWKP
+++ mktemp
++ local LAST_ERR=/tmp/tmp.rd198BYg52
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/internal-some-name '--template={{.data.xtrabackup}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.2w9yQ0EWKP
++ cat /tmp/tmp.rd198BYg52
++ rm /tmp/tmp.2w9yQ0EWKP /tmp/tmp.rd198BYg52
++ return 0
+ int_secret_pass='8XR&t&7-~U<6K^OITv'
+ [[ -z 8XR&t&7-~U<6K^OITv ]]
+ [[ 8XR&t&7-~U<6K^OITv != \8\X\R\&\t\&\7\-\~\U\<\6\K\^\O\I\T\v ]]
+ [[ xtrabackup != \p\r\o\x\y\a\d\m\i\n ]]
+ [[ '' =~ xtrabackup ]]
+ [[ '' =~ xtrabackup ]]
+ echo 'Running compare for xtrabackup'
Running compare for xtrabackup
+ compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''8XR&t&7-~U<6K^OITv'\'''
+ local command_id=select-4
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -uxtrabackup -p'\''8XR&t&7-~U<6K^OITv'\'''
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4-57.sql ]]
+ run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''8XR&t&7-~U<6K^OITv'\'''
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -uxtrabackup -p'\''8XR&t&7-~U<6K^OITv'\'''
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.LnYAaWVKeF
+++ mktemp
++ local LAST_ERR=/tmp/tmp.x1diP9hGRo
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.LnYAaWVKeF
++ cat /tmp/tmp.x1diP9hGRo
++ rm /tmp/tmp.LnYAaWVKeF /tmp/tmp.x1diP9hGRo
++ return 0
+ client_pod=pxc-client-545786ccbb-gfmwf
+ wait_pod pxc-client-545786ccbb-gfmwf
+ local pod=pxc-client-545786ccbb-gfmwf
+ local max_retry=480
+ local ns=
++ echo pxc-client-545786ccbb-gfmwf
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-545786ccbb-gfmwf condition met
waiting for pod/pxc-client-545786ccbb-gfmwf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ [[ ! -s /tmp/tmp.dqV03Eaoyh/select-4.sql ]]
++ grep 'Unknown MySQL server host' /tmp/tmp.dqV03Eaoyh/select-4.sql
+ [[ -n '' ]]
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4.sql /tmp/tmp.dqV03Eaoyh/select-4.sql
+ for user in root xtrabackup monitor proxyadmin operator replication
+ echo 'Checking monitor'
Checking monitor
++ getSecretData my-cluster-secrets monitor
++ local secretName=my-cluster-secrets
++ local dataKey=monitor
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.monitor}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.u35NNukGkc
+++ mktemp
++ local LAST_ERR=/tmp/tmp.0C5bJthQqW
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.monitor}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.u35NNukGkc
++ cat /tmp/tmp.0C5bJthQqW
++ rm /tmp/tmp.u35NNukGkc /tmp/tmp.0C5bJthQqW
++ return 0
+ secret_pass=monitor_password
++ getSecretData internal-some-name monitor
++ local secretName=internal-some-name
++ local dataKey=monitor
++ base64 --decode
++ kubectl_bin get secrets/internal-some-name '--template={{.data.monitor}}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.8cMLtwaUF8
+++ mktemp
++ local LAST_ERR=/tmp/tmp.0CfSDroCHt
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/internal-some-name '--template={{.data.monitor}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.8cMLtwaUF8
++ cat /tmp/tmp.0CfSDroCHt
++ rm /tmp/tmp.8cMLtwaUF8 /tmp/tmp.0CfSDroCHt
++ return 0
+ int_secret_pass=monitor_password
+ [[ -z monitor_password ]]
+ [[ monitor_password != \m\o\n\i\t\o\r\_\p\a\s\s\w\o\r\d ]]
+ [[ monitor != \p\r\o\x\y\a\d\m\i\n ]]
+ [[ '' =~ monitor ]]
+ [[ '' =~ monitor ]]
+ echo 'Running compare for monitor'
Running compare for monitor
+ compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\'''
+ local command_id=select-4
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\'''
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4-57.sql ]]
+ run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\'''
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\'''
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.O1cwlMNWUC
+++ mktemp
++ local LAST_ERR=/tmp/tmp.VYWIqhdsDD
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.O1cwlMNWUC
++ cat /tmp/tmp.VYWIqhdsDD
++ rm /tmp/tmp.O1cwlMNWUC /tmp/tmp.VYWIqhdsDD
++ return 0
+ client_pod=pxc-client-545786ccbb-gfmwf
+ wait_pod pxc-client-545786ccbb-gfmwf
+ local pod=pxc-client-545786ccbb-gfmwf
+ local max_retry=480
+ local ns=
++ echo pxc-client-545786ccbb-gfmwf
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-545786ccbb-gfmwf condition met
waiting for pod/pxc-client-545786ccbb-gfmwf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ [[ ! -s /tmp/tmp.dqV03Eaoyh/select-4.sql ]]
++ grep 'Unknown MySQL server host' /tmp/tmp.dqV03Eaoyh/select-4.sql
+ [[ -n '' ]]
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4.sql /tmp/tmp.dqV03Eaoyh/select-4.sql
+ for user in root xtrabackup monitor proxyadmin operator replication
+ echo 'Checking proxyadmin'
Checking proxyadmin
++ getSecretData my-cluster-secrets proxyadmin
++ local secretName=my-cluster-secrets
++ local dataKey=proxyadmin
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.oekihFw5kk
+++ mktemp
++ local LAST_ERR=/tmp/tmp.NlvGyQp9b5
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.oekihFw5kk
++ cat /tmp/tmp.NlvGyQp9b5
++ rm /tmp/tmp.oekihFw5kk /tmp/tmp.NlvGyQp9b5
++ return 0
+ secret_pass='rRrSz.~D?kSV0FWUP'
++ getSecretData internal-some-name proxyadmin
++ local secretName=internal-some-name
++ local dataKey=proxyadmin
++ kubectl_bin get secrets/internal-some-name '--template={{.data.proxyadmin}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.80pcAAbwHZ
+++ mktemp
++ local LAST_ERR=/tmp/tmp.r9writ2wJY
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/internal-some-name '--template={{.data.proxyadmin}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.80pcAAbwHZ
++ cat /tmp/tmp.r9writ2wJY
++ rm /tmp/tmp.80pcAAbwHZ /tmp/tmp.r9writ2wJY
++ return 0
+ int_secret_pass='rRrSz.~D?kSV0FWUP'
+ [[ -z rRrSz.~D?kSV0FWUP ]]
+ [[ rRrSz.~D?kSV0FWUP != \r\R\r\S\z\.\~\D\?\k\S\V\0\F\W\U\P ]]
+ [[ proxyadmin != \p\r\o\x\y\a\d\m\i\n ]]
+ [[ proxyadmin == \p\r\o\x\y\a\d\m\i\n ]]
+ [[ '' =~ proxyadmin ]]
+ [[ '' =~ proxyadmin ]]
+ echo 'Running compare for proxyadmin'
Running compare for proxyadmin
+ compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''rRrSz.~D?kSV0FWUP'\''' some-name-proxysql-0 '' proxysql
+ local command_id=select-2
+ local 'command=SHOW TABLES;'
+ local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''rRrSz.~D?kSV0FWUP'\'''
+ local pod=some-name-proxysql-0
+ local postfix=
+ local container_name=proxysql
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-2.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]]
+ run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''rRrSz.~D?kSV0FWUP'\''' some-name-proxysql-0 proxysql
+ local 'command=SHOW TABLES;'
+ local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''rRrSz.~D?kSV0FWUP'\'''
+ local pod=some-name-proxysql-0
+ local container_name=proxysql
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.dqV03Eaoyh/select-2.sql ']'
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-2.sql /tmp/tmp.dqV03Eaoyh/select-2.sql
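Unlike the other users, proxyadmin is verified against the ProxySQL admin interface on port 6032 inside the proxysql container rather than through the client pod. run_mysql_local's internals are hidden by 'set +o xtrace', so this sketch is an assumption about what it runs:

    # Sketch of the proxyadmin check: query the ProxySQL admin interface
    # from inside the proxysql container itself.
    kubectl exec some-name-proxysql-0 -c proxysql -- \
        mysql -h127.0.0.1 -P6032 -uproxyadmin -p"$proxyadmin_pass" -e 'SHOW TABLES;'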
]] + [[ operator != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ operator ]] + [[ '' =~ operator ]] + echo 'Running compare for operator' Running compare for operator + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''L*64b#1V7m&9%M&}M!'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''L*64b#1V7m&9%M&}M!'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''L*64b#1V7m&9%M&}M!'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''L*64b#1V7m&9%M&}M!'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xNpNGgKHDm +++ mktemp ++ local LAST_ERR=/tmp/tmp.RlxEIAKmKF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xNpNGgKHDm ++ cat /tmp/tmp.RlxEIAKmKF ++ rm /tmp/tmp.xNpNGgKHDm /tmp/tmp.RlxEIAKmKF ++ return 0 + client_pod=pxc-client-545786ccbb-gfmwf + wait_pod pxc-client-545786ccbb-gfmwf + local pod=pxc-client-545786ccbb-gfmwf + local max_retry=480 + local ns= ++ echo pxc-client-545786ccbb-gfmwf ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-545786ccbb-gfmwf condition met waiting for pod/pxc-client-545786ccbb-gfmwf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.dqV03Eaoyh/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.dqV03Eaoyh/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4.sql /tmp/tmp.dqV03Eaoyh/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking replication' Checking replication ++ getSecretData my-cluster-secrets replication ++ local secretName=my-cluster-secrets ++ local dataKey=replication ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.d1xAz86A4j +++ mktemp ++ local LAST_ERR=/tmp/tmp.AhnGmDJ0Ly ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.d1xAz86A4j ++ cat /tmp/tmp.AhnGmDJ0Ly ++ rm /tmp/tmp.d1xAz86A4j /tmp/tmp.AhnGmDJ0Ly ++ return 0 + secret_pass='?W1pm(?%7,Wnt@vH&' ++ getSecretData internal-some-name replication ++ local secretName=internal-some-name ++ local dataKey=replication ++ kubectl_bin get secrets/internal-some-name '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.PKs7tjQIMh +++ mktemp ++ local LAST_ERR=/tmp/tmp.4zxr1tC5Dv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PKs7tjQIMh ++ cat /tmp/tmp.4zxr1tC5Dv ++ rm /tmp/tmp.PKs7tjQIMh /tmp/tmp.4zxr1tC5Dv ++ return 0 + int_secret_pass='?W1pm(?%7,Wnt@vH&' + [[ -z ?W1pm(?%7,Wnt@vH& ]] + [[ ?W1pm(?%7,Wnt@vH& != \?\W\1\p\m\(\?\%\7\,\W\n\t\@\v\H\& ]] + [[ replication != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ replication ]] + [[ '' =~ replication ]] + echo 'Running compare for replication' Running compare for replication + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''?W1pm(?%7,Wnt@vH&'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''?W1pm(?%7,Wnt@vH&'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''?W1pm(?%7,Wnt@vH&'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''?W1pm(?%7,Wnt@vH&'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QmfS0n0tUU +++ mktemp ++ local LAST_ERR=/tmp/tmp.c7RSI3xJ9y ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QmfS0n0tUU ++ cat /tmp/tmp.c7RSI3xJ9y ++ rm /tmp/tmp.QmfS0n0tUU /tmp/tmp.c7RSI3xJ9y ++ return 0 + client_pod=pxc-client-545786ccbb-gfmwf + wait_pod pxc-client-545786ccbb-gfmwf + local pod=pxc-client-545786ccbb-gfmwf + local max_retry=480 + local 
ns= ++ echo pxc-client-545786ccbb-gfmwf ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-545786ccbb-gfmwf condition met waiting for pod/pxc-client-545786ccbb-gfmwf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.dqV03Eaoyh/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.dqV03Eaoyh/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4.sql /tmp/tmp.dqV03Eaoyh/select-4.sql + [[ -n '' ]] + [[ -n '' ]] + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.LRKiyP4lGb ++ mktemp + local LAST_ERR=/tmp/tmp.IaaWk36nzI + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LRKiyP4lGb secret/my-cluster-secrets patched + cat /tmp/tmp.IaaWk36nzI + rm /tmp/tmp.LRKiyP4lGb /tmp/tmp.IaaWk36nzI + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FENJ2LLuR4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.GSALtifF0X ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FENJ2LLuR4 ++ cat /tmp/tmp.GSALtifF0X ++ rm /tmp/tmp.FENJ2LLuR4 /tmp/tmp.GSALtifF0X ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ef7D7LLlg1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.CUo7VuCwJr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ef7D7LLlg1 ++ cat /tmp/tmp.CUo7VuCwJr ++ rm /tmp/tmp.ef7D7LLlg1 /tmp/tmp.CUo7VuCwJr ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.HZwwGuo27I ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.23gDUP5nZc +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ 
cat /tmp/tmp.HZwwGuo27I +++++ cat /tmp/tmp.23gDUP5nZc +++++ rm /tmp/tmp.HZwwGuo27I /tmp/tmp.23gDUP5nZc +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.crp47rnmJH ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.bSfz8NFICf +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.crp47rnmJH +++++ cat /tmp/tmp.bSfz8NFICf +++++ rm /tmp/tmp.crp47rnmJH /tmp/tmp.bSfz8NFICf +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.420O7wHFmR +++ mktemp ++ local LAST_ERR=/tmp/tmp.Qgk99qj8hN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.420O7wHFmR ++ cat /tmp/tmp.Qgk99qj8hN ++ rm /tmp/tmp.420O7wHFmR /tmp/tmp.Qgk99qj8hN ++ return 0 + [[ 2 == \2 ]] + echo + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GJyebqDFxN +++ mktemp ++ local LAST_ERR=/tmp/tmp.3gdylp0exd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GJyebqDFxN ++ cat /tmp/tmp.3gdylp0exd ++ rm /tmp/tmp.GJyebqDFxN /tmp/tmp.3gdylp0exd ++ return 0 + client_pod=pxc-client-545786ccbb-gfmwf + wait_pod pxc-client-545786ccbb-gfmwf + local pod=pxc-client-545786ccbb-gfmwf + local max_retry=480 + local ns= ++ echo pxc-client-545786ccbb-gfmwf ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-545786ccbb-gfmwf condition met waiting for pod/pxc-client-545786ccbb-gfmwf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.dqV03Eaoyh/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.dqV03Eaoyh/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4.sql /tmp/tmp.dqV03Eaoyh/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.RIfXcWX9uI ++ mktemp + local LAST_ERR=/tmp/tmp.oc1h0nAlmK + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RIfXcWX9uI perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.oc1h0nAlmK + rm /tmp/tmp.RIfXcWX9uI /tmp/tmp.oc1h0nAlmK + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9ddkc7yZCX +++ mktemp ++ local LAST_ERR=/tmp/tmp.d3o31ZYAlo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9ddkc7yZCX ++ cat /tmp/tmp.d3o31ZYAlo ++ rm /tmp/tmp.9ddkc7yZCX /tmp/tmp.d3o31ZYAlo ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NSgpXOaXny +++ mktemp ++ local LAST_ERR=/tmp/tmp.4O0a4Sg7ii ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NSgpXOaXny ++ cat /tmp/tmp.4O0a4Sg7ii ++ rm /tmp/tmp.NSgpXOaXny /tmp/tmp.4O0a4Sg7ii ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.y1KtIx7Xx0 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.h0F6p0SYAN +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.y1KtIx7Xx0 +++++ cat /tmp/tmp.h0F6p0SYAN +++++ rm /tmp/tmp.y1KtIx7Xx0 /tmp/tmp.h0F6p0SYAN +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.UXkECqPv0m ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.cH8a0sh9YR +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ 
set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.UXkECqPv0m +++++ cat /tmp/tmp.cH8a0sh9YR +++++ rm /tmp/tmp.UXkECqPv0m /tmp/tmp.cH8a0sh9YR +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.T0vmv3uQrl +++ mktemp ++ local LAST_ERR=/tmp/tmp.iS6PxWHsPH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.T0vmv3uQrl ++ cat /tmp/tmp.iS6PxWHsPH ++ rm /tmp/tmp.T0vmv3uQrl /tmp/tmp.iS6PxWHsPH ++ return 0 + [[ 3 == \3 ]] + echo + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.bgWp22ludk ++ mktemp + local LAST_ERR=/tmp/tmp.L0wJAwsV2A + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.bgWp22ludk secret/my-cluster-secrets patched + cat /tmp/tmp.L0wJAwsV2A + rm /tmp/tmp.bgWp22ludk /tmp/tmp.L0wJAwsV2A + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.U7MJu74ofM +++ mktemp ++ local LAST_ERR=/tmp/tmp.3plIHsB710 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.U7MJu74ofM ++ cat /tmp/tmp.3plIHsB710 ++ rm /tmp/tmp.U7MJu74ofM /tmp/tmp.3plIHsB710 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LM8S5GMt1s +++ mktemp ++ local LAST_ERR=/tmp/tmp.gKiL7sYbKj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LM8S5GMt1s ++ cat /tmp/tmp.gKiL7sYbKj ++ rm /tmp/tmp.LM8S5GMt1s /tmp/tmp.gKiL7sYbKj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
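The "test proxyadmin" rotation above comes down to a single strategic-merge patch on the Secret: the new password is base64-encoded and only the one data key is replaced, so the other credentials in my-cluster-secrets are left alone. A minimal sketch of the patch_secret helper as the trace expands it:

patch_secret() {
    local secret=$1 key=$2 value=$3    # value arrives base64-encoded (dGVzdC1wYXNzd29yZA==)
    # merge-patching .data replaces just this one key; sibling keys are preserved
    kubectl patch secret "${secret}" -p="{\"data\":{\"${key}\": \"${value}\"}}"
}

patch_secret my-cluster-secrets proxyadmin "$(echo -n 'test-password' | base64)"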
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gbmNxvLzNb +++ mktemp ++ local LAST_ERR=/tmp/tmp.EnaIvpC8es ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gbmNxvLzNb ++ cat /tmp/tmp.EnaIvpC8es ++ rm /tmp/tmp.gbmNxvLzNb /tmp/tmp.EnaIvpC8es ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.njJVf7jgOH +++ mktemp ++ local LAST_ERR=/tmp/tmp.AZtPkWbrwc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.njJVf7jgOH ++ cat /tmp/tmp.AZtPkWbrwc ++ rm /tmp/tmp.njJVf7jgOH /tmp/tmp.AZtPkWbrwc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7UfKcoEJBK +++ mktemp ++ local LAST_ERR=/tmp/tmp.FcgrlBzLSD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7UfKcoEJBK ++ cat /tmp/tmp.FcgrlBzLSD ++ rm /tmp/tmp.7UfKcoEJBK /tmp/tmp.FcgrlBzLSD ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gblVUor6QO +++ mktemp ++ local LAST_ERR=/tmp/tmp.9dNFucpRhD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gblVUor6QO ++ cat /tmp/tmp.9dNFucpRhD ++ rm /tmp/tmp.gblVUor6QO /tmp/tmp.9dNFucpRhD ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.nB7QHUQjzo ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.7EtDbJykYu +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.nB7QHUQjzo +++++ cat /tmp/tmp.7EtDbJykYu +++++ rm /tmp/tmp.nB7QHUQjzo /tmp/tmp.7EtDbJykYu +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.bYDmOjtaQN ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.LueEEzKHdm +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.bYDmOjtaQN +++++ cat /tmp/tmp.LueEEzKHdm +++++ rm /tmp/tmp.bYDmOjtaQN /tmp/tmp.LueEEzKHdm +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local 
LAST_OUT=/tmp/tmp.1iVRuG6OjB +++ mktemp ++ local LAST_ERR=/tmp/tmp.4QnLMVCNGI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1iVRuG6OjB ++ cat /tmp/tmp.4QnLMVCNGI ++ rm /tmp/tmp.1iVRuG6OjB /tmp/tmp.4QnLMVCNGI ++ return 0 + [[ 3 == \3 ]] + echo + sleep 15 + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.dqV03Eaoyh/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-2.sql /tmp/tmp.dqV03Eaoyh/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.dqV03Eaoyh/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-2.sql /tmp/tmp.dqV03Eaoyh/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.dqV03Eaoyh/select-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-2.sql /tmp/tmp.dqV03Eaoyh/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.R6YR6JaxFt ++ mktemp + local LAST_ERR=/tmp/tmp.mdel0XWPPE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.R6YR6JaxFt perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.mdel0XWPPE + rm /tmp/tmp.R6YR6JaxFt /tmp/tmp.mdel0XWPPE + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Rz182lWvGa ++ mktemp + local LAST_ERR=/tmp/tmp.RPgffmium5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Rz182lWvGa secret/my-cluster-secrets patched + cat /tmp/tmp.RPgffmium5 + rm /tmp/tmp.Rz182lWvGa /tmp/tmp.RPgffmium5 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0lqjuRR5OU +++ mktemp ++ local LAST_ERR=/tmp/tmp.z70cQ3X5cc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0lqjuRR5OU ++ cat /tmp/tmp.z70cQ3X5cc ++ rm /tmp/tmp.0lqjuRR5OU /tmp/tmp.z70cQ3X5cc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nTorj3DLYa +++ mktemp ++ local LAST_ERR=/tmp/tmp.huBZItpVwo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nTorj3DLYa ++ cat /tmp/tmp.huBZItpVwo ++ rm /tmp/tmp.nTorj3DLYa /tmp/tmp.huBZItpVwo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
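Every rotation is followed by wait_cluster_consistency, which polls the custom resource status until the operator reports the cluster ready again; the trace shows the loop shape (read .status.state every 5 seconds, give up after 300 iterations, then verify the ready replica counts). A condensed sketch, assuming the same jsonpath fields seen in the log:

wait_cluster_consistency() {
    local cluster_name=$1 cluster_size=$2 proxy_size=$3
    local i=0 max=300
    sleep 7
    until [[ $(kubectl get pxc "${cluster_name}" -o 'jsonpath={.status.state}') == "ready" ]]; do
        echo -n .
        sleep 5
        [[ $i -ge $max ]] && return 1   # timed out waiting for the ready state
        let i+=1
    done
    # once ready, the reported replica counts must match what the test expects;
    # which proxy field to read ({.status.proxysql.ready} here) is resolved per
    # cluster spec, see the get_proxy_engine sketch further down
    [[ $(kubectl get pxc "${cluster_name}" -o 'jsonpath={.status.pxc.ready}') == "${cluster_size}" ]] &&
    [[ $(kubectl get pxc "${cluster_name}" -o 'jsonpath={.status.proxysql.ready}') == "${proxy_size}" ]]
}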
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OY4sfuh5bO +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZNi7QOqbrF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OY4sfuh5bO ++ cat /tmp/tmp.ZNi7QOqbrF ++ rm /tmp/tmp.OY4sfuh5bO /tmp/tmp.ZNi7QOqbrF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.10IbkephUa +++ mktemp ++ local LAST_ERR=/tmp/tmp.S0QGBXwzUM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.10IbkephUa ++ cat /tmp/tmp.S0QGBXwzUM ++ rm /tmp/tmp.10IbkephUa /tmp/tmp.S0QGBXwzUM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Aq7zgDOjPM +++ mktemp ++ local LAST_ERR=/tmp/tmp.ux4xPpSkWL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Aq7zgDOjPM ++ cat /tmp/tmp.ux4xPpSkWL ++ rm /tmp/tmp.Aq7zgDOjPM /tmp/tmp.ux4xPpSkWL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hgt7w5kjyh +++ mktemp ++ local LAST_ERR=/tmp/tmp.KbtG7BTcoA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hgt7w5kjyh ++ cat /tmp/tmp.KbtG7BTcoA ++ rm /tmp/tmp.hgt7w5kjyh /tmp/tmp.KbtG7BTcoA ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qrjL3PNbEH +++ mktemp ++ local LAST_ERR=/tmp/tmp.kLs9RR7Z11 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qrjL3PNbEH ++ cat /tmp/tmp.kLs9RR7Z11 ++ rm /tmp/tmp.qrjL3PNbEH /tmp/tmp.kLs9RR7Z11 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hVIWLZo5aT +++ mktemp ++ local LAST_ERR=/tmp/tmp.YSkeewOhOd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hVIWLZo5aT ++ cat /tmp/tmp.YSkeewOhOd ++ rm /tmp/tmp.hVIWLZo5aT /tmp/tmp.YSkeewOhOd ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
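The mktemp/LAST_OUT/LAST_ERR scaffolding wrapped around every command in this log is the kubectl_bin retry wrapper: stdout and stderr are captured to temp files, the kubectl call is retried up to three times, and the captures are replayed and removed on the way out. A sketch following the expanded trace (only successful first attempts appear in this excerpt, so the pause between attempts is an assumption):

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do             # up to three attempts
        set +e
        kubectl "$@" >"${LAST_OUT}" 2>"${LAST_ERR}"
        exit_status=$?
        set -e
        if [ "${exit_status}" != 0 ]; then
            sleep 1                     # assumption: short pause before retrying
            continue
        fi
        break
    done
    cat "${LAST_OUT}"
    cat "${LAST_ERR}" >&2
    rm "${LAST_OUT}" "${LAST_ERR}"
    return "${exit_status}"
}

kubectl_bin get pxc some-name -o 'jsonpath={.status.state}'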
.+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DMchzK2JMz +++ mktemp ++ local LAST_ERR=/tmp/tmp.oTavvDbyJH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DMchzK2JMz ++ cat /tmp/tmp.oTavvDbyJH ++ rm /tmp/tmp.DMchzK2JMz /tmp/tmp.oTavvDbyJH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QnXb9scpt9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.K8HPZiGtcc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QnXb9scpt9 ++ cat /tmp/tmp.K8HPZiGtcc ++ rm /tmp/tmp.QnXb9scpt9 /tmp/tmp.K8HPZiGtcc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RYLC4eHBMO +++ mktemp ++ local LAST_ERR=/tmp/tmp.5ruOZPSTNu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RYLC4eHBMO ++ cat /tmp/tmp.5ruOZPSTNu ++ rm /tmp/tmp.RYLC4eHBMO /tmp/tmp.5ruOZPSTNu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZVdlqHSdtX +++ mktemp ++ local LAST_ERR=/tmp/tmp.Qaddx7rn4s ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZVdlqHSdtX ++ cat /tmp/tmp.Qaddx7rn4s ++ rm /tmp/tmp.ZVdlqHSdtX /tmp/tmp.Qaddx7rn4s ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.r99QErRbOu +++ mktemp ++ local LAST_ERR=/tmp/tmp.zPEgjirEmr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.r99QErRbOu ++ cat /tmp/tmp.zPEgjirEmr ++ rm /tmp/tmp.r99QErRbOu /tmp/tmp.zPEgjirEmr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.51dnVN0nvI +++ mktemp ++ local LAST_ERR=/tmp/tmp.0fdCfdctNy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.51dnVN0nvI ++ cat /tmp/tmp.0fdCfdctNy ++ rm /tmp/tmp.51dnVN0nvI /tmp/tmp.0fdCfdctNy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
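The payload dGVzdC1wYXNzd29yZA== that every one of these patches carries is simply the base64 encoding of the literal test-password: Kubernetes stores Secret .data values base64-encoded, so the test encodes once and reuses the same string for every user. For example:

$ echo -n 'test-password' | base64
dGVzdC1wYXNzd29yZA==
$ echo 'dGVzdC1wYXNzd29yZA==' | base64 --decode
test-password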
.+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VyUAvWx3bo +++ mktemp ++ local LAST_ERR=/tmp/tmp.28lrlStrZJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VyUAvWx3bo ++ cat /tmp/tmp.28lrlStrZJ ++ rm /tmp/tmp.VyUAvWx3bo /tmp/tmp.28lrlStrZJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6jTJgAsvVf +++ mktemp ++ local LAST_ERR=/tmp/tmp.Cr8hsrX6g5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6jTJgAsvVf ++ cat /tmp/tmp.Cr8hsrX6g5 ++ rm /tmp/tmp.6jTJgAsvVf /tmp/tmp.Cr8hsrX6g5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.S2lrlc9cvz +++ mktemp ++ local LAST_ERR=/tmp/tmp.R1NfF6bafw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.S2lrlc9cvz ++ cat /tmp/tmp.R1NfF6bafw ++ rm /tmp/tmp.S2lrlc9cvz /tmp/tmp.R1NfF6bafw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WAK5B4M7k1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZuvfJlfl2A ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WAK5B4M7k1 ++ cat /tmp/tmp.ZuvfJlfl2A ++ rm /tmp/tmp.WAK5B4M7k1 /tmp/tmp.ZuvfJlfl2A ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.h33KmZG6xN +++ mktemp ++ local LAST_ERR=/tmp/tmp.wg99uwxhEj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.h33KmZG6xN ++ cat /tmp/tmp.wg99uwxhEj ++ rm /tmp/tmp.h33KmZG6xN /tmp/tmp.wg99uwxhEj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0772AFhX18 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Z8ZK4wYOZ2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0772AFhX18 ++ cat /tmp/tmp.Z8ZK4wYOZ2 ++ rm /tmp/tmp.0772AFhX18 /tmp/tmp.Z8ZK4wYOZ2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
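Inside every consistency check, the deeply nested get_proxy_engine/get_proxy expansion (the +++++ lines) answers one question: which proxy's ready counter to compare. It probes .spec.haproxy.enabled first and falls back to .spec.proxysql.enabled, which is true throughout this run. Condensed to the branch that matters for the status check:

get_proxy_engine() {
    local cluster_name=$1
    if [[ $(kubectl get pxc "${cluster_name}" -o 'jsonpath={.spec.haproxy.enabled}') == "true" ]]; then
        echo haproxy
    elif [[ $(kubectl get pxc "${cluster_name}" -o 'jsonpath={.spec.proxysql.enabled}') == "true" ]]; then
        echo proxysql      # the case for every check in this log
    fi
}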
.+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iy6XgdPWGy +++ mktemp ++ local LAST_ERR=/tmp/tmp.UUH5SJotZt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iy6XgdPWGy ++ cat /tmp/tmp.UUH5SJotZt ++ rm /tmp/tmp.iy6XgdPWGy /tmp/tmp.UUH5SJotZt ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lrEdOhRXgB +++ mktemp ++ local LAST_ERR=/tmp/tmp.Fmu5aIn6jq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lrEdOhRXgB ++ cat /tmp/tmp.Fmu5aIn6jq ++ rm /tmp/tmp.lrEdOhRXgB /tmp/tmp.Fmu5aIn6jq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nR8Eu5jL56 +++ mktemp ++ local LAST_ERR=/tmp/tmp.88K2hbkgnV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nR8Eu5jL56 ++ cat /tmp/tmp.88K2hbkgnV ++ rm /tmp/tmp.nR8Eu5jL56 /tmp/tmp.88K2hbkgnV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 22 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Dcue2H3dN7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.NdCKqlwUeR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Dcue2H3dN7 ++ cat /tmp/tmp.NdCKqlwUeR ++ rm /tmp/tmp.Dcue2H3dN7 /tmp/tmp.NdCKqlwUeR ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aC1ZzNcYsW +++ mktemp ++ local LAST_ERR=/tmp/tmp.e5Gb6mntgv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aC1ZzNcYsW ++ cat /tmp/tmp.e5Gb6mntgv ++ rm /tmp/tmp.aC1ZzNcYsW /tmp/tmp.e5Gb6mntgv ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.q397nBxePe ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.oex1Rkr3C1 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.q397nBxePe +++++ cat /tmp/tmp.oex1Rkr3C1 +++++ rm /tmp/tmp.q397nBxePe /tmp/tmp.oex1Rkr3C1 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Huiguf7dW1 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.UMOmfBVdri +++++ local exit_status=0 ++++++ seq 0 2 
+++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Huiguf7dW1 +++++ cat /tmp/tmp.UMOmfBVdri +++++ rm /tmp/tmp.Huiguf7dW1 /tmp/tmp.UMOmfBVdri +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1x7Hr1ps17 +++ mktemp ++ local LAST_ERR=/tmp/tmp.viuLJptLIN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1x7Hr1ps17 ++ cat /tmp/tmp.viuLJptLIN ++ rm /tmp/tmp.1x7Hr1ps17 /tmp/tmp.viuLJptLIN ++ return 0 + [[ 2 == \2 ]] + echo + sleep 15 + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' 
-s /tmp/tmp.dqV03Eaoyh/select-3.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-3.sql /tmp/tmp.dqV03Eaoyh/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.0TZYJalh7R ++ mktemp + local LAST_ERR=/tmp/tmp.uGnGfkioN4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0TZYJalh7R secret/my-cluster-secrets patched + cat /tmp/tmp.uGnGfkioN4 + rm /tmp/tmp.0TZYJalh7R /tmp/tmp.uGnGfkioN4 + return 0 + wait_for_password_propagation my-cluster-secrets monitor some-name + local secret=my-cluster-secrets + local user=monitor + local cluster=some-name + local max_retry=600 + local root_pass + local retry ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.hGvIUoF7dV +++ mktemp ++ local LAST_ERR=/tmp/tmp.T99YpyskaR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hGvIUoF7dV ++ cat /tmp/tmp.T99YpyskaR ++ rm /tmp/tmp.hGvIUoF7dV /tmp/tmp.T99YpyskaR ++ return 0 + root_pass=test-password + [[ -z test-password ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + echo 'Skipping dual password feature doesn'\''t work for 5.7. PXC 5.7 doesn'\''t support it!' Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it! + return 0 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QMwzM14Rlz +++ mktemp ++ local LAST_ERR=/tmp/tmp.q5xRxhlk7b ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QMwzM14Rlz ++ cat /tmp/tmp.q5xRxhlk7b ++ rm /tmp/tmp.QMwzM14Rlz /tmp/tmp.q5xRxhlk7b ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
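wait_for_password_propagation is a no-op here because the image is 5.7: MySQL 8.0 introduced dual passwords, where an account can temporarily hold both the old and the new password so clients keep connecting during rotation, and PXC 5.7 has no equivalent, which is why the test logs the skip and falls back to a plain consistency wait. On an 8.0 cluster the server-side mechanics look roughly like this (a hypothetical 8.0-only illustration using standard MySQL syntax, not taken from this log):

# PXC 5.7, as in this run, rejects RETAIN CURRENT PASSWORD
kubectl exec some-name-pxc-0 -c pxc -- \
    mysql -uroot -p"${root_pass}" \
    -e "ALTER USER 'monitor'@'%' IDENTIFIED BY 'new-password' RETAIN CURRENT PASSWORD"
# after all clients have re-authenticated with the new password:
kubectl exec some-name-pxc-0 -c pxc -- \
    mysql -uroot -p"${root_pass}" \
    -e "ALTER USER 'monitor'@'%' DISCARD OLD PASSWORD"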
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tue6f3qaMu +++ mktemp ++ local LAST_ERR=/tmp/tmp.fvYTlPtchS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tue6f3qaMu ++ cat /tmp/tmp.fvYTlPtchS ++ rm /tmp/tmp.tue6f3qaMu /tmp/tmp.fvYTlPtchS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.O9BVPhJTaP +++ mktemp ++ local LAST_ERR=/tmp/tmp.HZGkMfblBv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.O9BVPhJTaP ++ cat /tmp/tmp.HZGkMfblBv ++ rm /tmp/tmp.O9BVPhJTaP /tmp/tmp.HZGkMfblBv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9N3MMkrBMr +++ mktemp ++ local LAST_ERR=/tmp/tmp.TUxaUeReLP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9N3MMkrBMr ++ cat /tmp/tmp.TUxaUeReLP ++ rm /tmp/tmp.9N3MMkrBMr /tmp/tmp.TUxaUeReLP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CRz0n2HVuP +++ mktemp ++ local LAST_ERR=/tmp/tmp.a2jOYCuWUs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CRz0n2HVuP ++ cat /tmp/tmp.a2jOYCuWUs ++ rm /tmp/tmp.CRz0n2HVuP /tmp/tmp.a2jOYCuWUs ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sZJRUby63s +++ mktemp ++ local LAST_ERR=/tmp/tmp.yB1sccyIIn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sZJRUby63s ++ cat /tmp/tmp.yB1sccyIIn ++ rm /tmp/tmp.sZJRUby63s /tmp/tmp.yB1sccyIIn ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
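All of the "Running compare" / select-N steps share one shape: pick the expected result file, preferring a version-suffixed variant such as select-4-57.sql when the image matches 5.7, run the query through the client pod, and diff the captured output against the expectation, so a silent diff means the user can still log in and sees the expected schema. A condensed sketch, where IMAGE_PXC, test_dir, and tmp_dir are assumed names standing in for the image tag, the e2e-tests checkout, and the /tmp/tmp.* capture directory seen in the log:

compare_mysql_cmd() {
    local command_id=$1 command=$2 uri=$3
    local expected_result="${test_dir}/users/compare/${command_id}.sql"
    # prefer a version-specific expectation when one exists, e.g. select-4-57.sql
    if [[ ${IMAGE_PXC} =~ 5\.7 && -f "${expected_result%.sql}-57.sql" ]]; then
        expected_result="${expected_result%.sql}-57.sql"
    fi
    run_mysql "${command}" "${uri}" >"${tmp_dir}/${command_id}.sql"
    [[ -s "${tmp_dir}/${command_id}.sql" ]] || return 1   # the query must produce output
    diff -u "${expected_result}" "${tmp_dir}/${command_id}.sql"
}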
.+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IBjR6fD9RH +++ mktemp ++ local LAST_ERR=/tmp/tmp.SwiK3mgpm0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IBjR6fD9RH ++ cat /tmp/tmp.SwiK3mgpm0 ++ rm /tmp/tmp.IBjR6fD9RH /tmp/tmp.SwiK3mgpm0 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.W2kQHNGcgI +++ mktemp ++ local LAST_ERR=/tmp/tmp.VDKcZZcMD2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.W2kQHNGcgI ++ cat /tmp/tmp.VDKcZZcMD2 ++ rm /tmp/tmp.W2kQHNGcgI /tmp/tmp.VDKcZZcMD2 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.1TkMSvKczM ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.kbvaU6gYIG +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.1TkMSvKczM +++++ cat /tmp/tmp.kbvaU6gYIG +++++ rm /tmp/tmp.1TkMSvKczM /tmp/tmp.kbvaU6gYIG +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.yxJmqCBV4x ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Dqj3DewMcq +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.yxJmqCBV4x +++++ cat /tmp/tmp.Dqj3DewMcq +++++ rm /tmp/tmp.yxJmqCBV4x /tmp/tmp.Dqj3DewMcq +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.El7dNGTSVq +++ mktemp ++ local LAST_ERR=/tmp/tmp.uxxMfx2YWk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.El7dNGTSVq ++ cat /tmp/tmp.uxxMfx2YWk ++ rm /tmp/tmp.El7dNGTSVq /tmp/tmp.uxxMfx2YWk ++ return 0 + [[ 2 == \2 ]] + echo + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW 
TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ogvJcBNDgQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.pM5pUISpKs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ogvJcBNDgQ ++ cat /tmp/tmp.pM5pUISpKs ++ rm /tmp/tmp.ogvJcBNDgQ /tmp/tmp.pM5pUISpKs ++ return 0 + client_pod=pxc-client-545786ccbb-gfmwf + wait_pod pxc-client-545786ccbb-gfmwf + local pod=pxc-client-545786ccbb-gfmwf + local max_retry=480 + local ns= ++ echo pxc-client-545786ccbb-gfmwf ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-545786ccbb-gfmwf condition met waiting for pod/pxc-client-545786ccbb-gfmwf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.dqV03Eaoyh/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.dqV03Eaoyh/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4.sql /tmp/tmp.dqV03Eaoyh/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.lSLCzfC0Rg ++ mktemp + local LAST_ERR=/tmp/tmp.1MKR5PBSHK + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lSLCzfC0Rg secret/my-cluster-secrets patched + cat /tmp/tmp.1MKR5PBSHK + rm /tmp/tmp.lSLCzfC0Rg /tmp/tmp.1MKR5PBSHK + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.M5nc3dNuU9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ixa0oGNG8a ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.M5nc3dNuU9 ++ cat /tmp/tmp.Ixa0oGNG8a ++ rm /tmp/tmp.M5nc3dNuU9 /tmp/tmp.Ixa0oGNG8a ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Hs280CkMd1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.U32tdPPoO6 ++ local 
exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Hs280CkMd1 ++ cat /tmp/tmp.U32tdPPoO6 ++ rm /tmp/tmp.Hs280CkMd1 /tmp/tmp.U32tdPPoO6 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.QsdTNhFF0d ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.CXALz6CNYf +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.QsdTNhFF0d +++++ cat /tmp/tmp.CXALz6CNYf +++++ rm /tmp/tmp.QsdTNhFF0d /tmp/tmp.CXALz6CNYf +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.TwpX92F3Xu ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.xgb1MdtekN +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.TwpX92F3Xu +++++ cat /tmp/tmp.xgb1MdtekN +++++ rm /tmp/tmp.TwpX92F3Xu /tmp/tmp.xgb1MdtekN +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ScXZuPadCR +++ mktemp ++ local LAST_ERR=/tmp/tmp.Am1r3hKgpq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ScXZuPadCR ++ cat /tmp/tmp.Am1r3hKgpq ++ rm /tmp/tmp.ScXZuPadCR /tmp/tmp.Am1r3hKgpq ++ return 0 + [[ 2 == \2 ]] + echo + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.I2tBXCwjhP +++ mktemp ++ local LAST_ERR=/tmp/tmp.s54XwFtvIo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.I2tBXCwjhP ++ cat /tmp/tmp.s54XwFtvIo ++ rm /tmp/tmp.I2tBXCwjhP /tmp/tmp.s54XwFtvIo ++ return 0 + 
client_pod=pxc-client-545786ccbb-gfmwf + wait_pod pxc-client-545786ccbb-gfmwf + local pod=pxc-client-545786ccbb-gfmwf + local max_retry=480 + local ns= ++ echo pxc-client-545786ccbb-gfmwf ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-545786ccbb-gfmwf condition met waiting for pod/pxc-client-545786ccbb-gfmwf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.dqV03Eaoyh/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.dqV03Eaoyh/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4.sql /tmp/tmp.dqV03Eaoyh/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ctesgEJKfs ++ mktemp + local LAST_ERR=/tmp/tmp.g3r4DVItGZ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ctesgEJKfs perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.g3r4DVItGZ + rm /tmp/tmp.ctesgEJKfs /tmp/tmp.g3r4DVItGZ + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DmY4pHsEIU +++ mktemp ++ local LAST_ERR=/tmp/tmp.crhNoqdXqb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DmY4pHsEIU ++ cat /tmp/tmp.crhNoqdXqb ++ rm /tmp/tmp.DmY4pHsEIU /tmp/tmp.crhNoqdXqb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PBt6yR9pQ6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.xECkocAkm3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PBt6yR9pQ6 ++ cat /tmp/tmp.xECkocAkm3 ++ rm /tmp/tmp.PBt6yR9pQ6 /tmp/tmp.xECkocAkm3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
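The 'change secret name' step above re-points the cluster at a different credentials Secret with a JSON merge patch. Stripped of the kubectl_bin retry and mktemp plumbing, the operative command is just this sketch (names taken directly from the trace):

    # Re-point the PXC custom resource at another credentials Secret.
    # A merge patch touches only spec.secretsName and leaves the rest of
    # the custom resource intact.
    kubectl patch pxc some-name --type merge \
        --patch '{"spec": {"secretsName": "my-cluster-secrets-2"}}'

The operator then re-reads all system-user credentials from my-cluster-secrets-2, which is why the cluster drops back to initializing in the iterations that follow.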
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sgtjf3FMW7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.niCo3WUm17 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sgtjf3FMW7 ++ cat /tmp/tmp.niCo3WUm17 ++ rm /tmp/tmp.sgtjf3FMW7 /tmp/tmp.niCo3WUm17 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XCUD7divP1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.OnflmrE2Y8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XCUD7divP1 ++ cat /tmp/tmp.OnflmrE2Y8 ++ rm /tmp/tmp.XCUD7divP1 /tmp/tmp.OnflmrE2Y8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vfFXOJfTAn +++ mktemp ++ local LAST_ERR=/tmp/tmp.y4OwXfe0vR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vfFXOJfTAn ++ cat /tmp/tmp.y4OwXfe0vR ++ rm /tmp/tmp.vfFXOJfTAn /tmp/tmp.y4OwXfe0vR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YXUJaENZMV +++ mktemp ++ local LAST_ERR=/tmp/tmp.oyPQCInkek ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YXUJaENZMV ++ cat /tmp/tmp.oyPQCInkek ++ rm /tmp/tmp.YXUJaENZMV /tmp/tmp.oyPQCInkek ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eSKnWziqIC +++ mktemp ++ local LAST_ERR=/tmp/tmp.2rgezZfFCk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eSKnWziqIC ++ cat /tmp/tmp.2rgezZfFCk ++ rm /tmp/tmp.eSKnWziqIC /tmp/tmp.2rgezZfFCk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VMO70lrHua +++ mktemp ++ local LAST_ERR=/tmp/tmp.k9Tks2qGXv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VMO70lrHua ++ cat /tmp/tmp.k9Tks2qGXv ++ rm /tmp/tmp.VMO70lrHua /tmp/tmp.k9Tks2qGXv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
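Each repeated block above and below is one iteration of the wait_cluster_consistency polling loop. A hedged reconstruction from the xtrace output (variable names are inferred; this is not the verbatim helper source):

    # Poll .status.state every 5s, up to 300 iterations, one dot per try.
    i=0
    max=300
    until [[ "$(kubectl get pxc some-name -o jsonpath='{.status.state}')" == "ready" ]]; do
        echo -n .
        sleep 5
        if [[ $i -ge $max ]]; then
            echo "pxc/some-name never became ready" >&2
            exit 1
        fi
        let i+=1
    done

Once the state reaches ready, the helper also asserts .status.pxc.ready and .status.proxysql.ready against the expected sizes, which is what the [[ 3 == \3 ]] and [[ 2 == \2 ]] tests in the trace are doing.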
.+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0Lnzyf6i9W +++ mktemp ++ local LAST_ERR=/tmp/tmp.fzMwFOmc2C ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0Lnzyf6i9W ++ cat /tmp/tmp.fzMwFOmc2C ++ rm /tmp/tmp.0Lnzyf6i9W /tmp/tmp.fzMwFOmc2C ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tD74Pb7DPA +++ mktemp ++ local LAST_ERR=/tmp/tmp.QoBufWjxNG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tD74Pb7DPA ++ cat /tmp/tmp.QoBufWjxNG ++ rm /tmp/tmp.tD74Pb7DPA /tmp/tmp.QoBufWjxNG ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bFbBncK6Dm +++ mktemp ++ local LAST_ERR=/tmp/tmp.9vN5wn7Yhq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bFbBncK6Dm ++ cat /tmp/tmp.9vN5wn7Yhq ++ rm /tmp/tmp.bFbBncK6Dm /tmp/tmp.9vN5wn7Yhq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eEs7iOygcl +++ mktemp ++ local LAST_ERR=/tmp/tmp.GrK9lBUfqm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eEs7iOygcl ++ cat /tmp/tmp.GrK9lBUfqm ++ rm /tmp/tmp.eEs7iOygcl /tmp/tmp.GrK9lBUfqm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uvkesiwN5b +++ mktemp ++ local LAST_ERR=/tmp/tmp.MFnH1u3KzT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uvkesiwN5b ++ cat /tmp/tmp.MFnH1u3KzT ++ rm /tmp/tmp.uvkesiwN5b /tmp/tmp.MFnH1u3KzT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e7u2wwhU7L +++ mktemp ++ local LAST_ERR=/tmp/tmp.F84JIIlovw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.e7u2wwhU7L ++ cat /tmp/tmp.F84JIIlovw ++ rm /tmp/tmp.e7u2wwhU7L /tmp/tmp.F84JIIlovw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
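Every kubectl call in this log runs through the kubectl_bin wrapper, whose shape can be read off the trace: two mktemp files, up to three attempts, break on the first success. A sketch under those assumptions (the failure-path delay never appears in a successful trace, so it is assumed):

    # Retry wrapper inferred from the trace: capture stdout/stderr to temp
    # files, attempt the command up to three times, stop on first success.
    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ $exit_status != 0 ]; then
                sleep 1   # assumption: the real retry delay is not visible here
            else
                break
            fi
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR"
        rm "$LAST_OUT" "$LAST_ERR"
        return $exit_status
    }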
.+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xZhprlqaAF +++ mktemp ++ local LAST_ERR=/tmp/tmp.uXMNI8fVwD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xZhprlqaAF ++ cat /tmp/tmp.uXMNI8fVwD ++ rm /tmp/tmp.xZhprlqaAF /tmp/tmp.uXMNI8fVwD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hrn8UAqih3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.fmK6VDTf9S ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hrn8UAqih3 ++ cat /tmp/tmp.fmK6VDTf9S ++ rm /tmp/tmp.hrn8UAqih3 /tmp/tmp.fmK6VDTf9S ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HShbmvGeR1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.lD2gfuOoaB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HShbmvGeR1 ++ cat /tmp/tmp.lD2gfuOoaB ++ rm /tmp/tmp.HShbmvGeR1 /tmp/tmp.lD2gfuOoaB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RCi7VnDd1L +++ mktemp ++ local LAST_ERR=/tmp/tmp.I9h6FnrQoi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RCi7VnDd1L ++ cat /tmp/tmp.I9h6FnrQoi ++ rm /tmp/tmp.RCi7VnDd1L /tmp/tmp.I9h6FnrQoi ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ngHuaKEIqA +++ mktemp ++ local LAST_ERR=/tmp/tmp.gyVBfDh11n ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ngHuaKEIqA ++ cat /tmp/tmp.gyVBfDh11n ++ rm /tmp/tmp.ngHuaKEIqA /tmp/tmp.gyVBfDh11n ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6jZJQ3GkdU +++ mktemp ++ local LAST_ERR=/tmp/tmp.g0zgo8UAKA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6jZJQ3GkdU ++ cat /tmp/tmp.g0zgo8UAKA ++ rm /tmp/tmp.6jZJQ3GkdU /tmp/tmp.g0zgo8UAKA ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
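The run_mysql calls earlier in this test (and again after this wait) resolve a client pod by label, wait for it, and execute the statement inside it. The mysql invocation itself is suppressed by set +o xtrace, so the exec line below is an assumption about its likely shape, not a verbatim copy:

    # Locate the long-lived client pod by label.
    client_pod=$(kubectl get pods --selector=name=pxc-client \
        -o 'jsonpath={.items[].metadata.name}')
    # 'condition met' in the log is kubectl wait's success message.
    kubectl wait --for=condition=ready "pod/$client_pod"
    # No -c flag: the 'Defaulted container "pxc-client" out of: pxc-client,
    # backup' warning shows kubectl choosing the container on its own.
    # The mysql flags are assumptions; the log hides the real invocation.
    echo 'SHOW TABLES;' | kubectl exec -i "$client_pod" -- \
        mysql -sN -h some-name-proxysql -umonitor -p'test-password'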
.+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qgUQq0wA1C +++ mktemp ++ local LAST_ERR=/tmp/tmp.VsPpb5urXK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qgUQq0wA1C ++ cat /tmp/tmp.VsPpb5urXK ++ rm /tmp/tmp.qgUQq0wA1C /tmp/tmp.VsPpb5urXK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NHvj0J6VJd +++ mktemp ++ local LAST_ERR=/tmp/tmp.tDhq0JCHao ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NHvj0J6VJd ++ cat /tmp/tmp.tDhq0JCHao ++ rm /tmp/tmp.NHvj0J6VJd /tmp/tmp.tDhq0JCHao ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gIMmBTwzq3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.brQQJ9org4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gIMmBTwzq3 ++ cat /tmp/tmp.brQQJ9org4 ++ rm /tmp/tmp.gIMmBTwzq3 /tmp/tmp.brQQJ9org4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 22 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.84Hctat0ey +++ mktemp ++ local LAST_ERR=/tmp/tmp.Fwg9SrxGWz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.84Hctat0ey ++ cat /tmp/tmp.Fwg9SrxGWz ++ rm /tmp/tmp.84Hctat0ey /tmp/tmp.Fwg9SrxGWz ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uyiJMpTdXE +++ mktemp ++ local LAST_ERR=/tmp/tmp.H0iyZOkz2u ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uyiJMpTdXE ++ cat /tmp/tmp.H0iyZOkz2u ++ rm /tmp/tmp.uyiJMpTdXE /tmp/tmp.H0iyZOkz2u ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.jmoy685aii ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.J68iLgDQ79 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.jmoy685aii +++++ cat /tmp/tmp.J68iLgDQ79 +++++ rm /tmp/tmp.jmoy685aii /tmp/tmp.J68iLgDQ79 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.09X3jih5Cp ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ThT7CX2cd9 +++++ local exit_status=0 ++++++ seq 0 2 
+++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.09X3jih5Cp +++++ cat /tmp/tmp.ThT7CX2cd9 +++++ rm /tmp/tmp.09X3jih5Cp /tmp/tmp.ThT7CX2cd9 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YbqPfi41iP +++ mktemp ++ local LAST_ERR=/tmp/tmp.XlBqBI9uda ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YbqPfi41iP ++ cat /tmp/tmp.XlBqBI9uda ++ rm /tmp/tmp.YbqPfi41iP /tmp/tmp.XlBqBI9uda ++ return 0 + [[ 2 == \2 ]] + echo + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Z1H9qObwVY ++ mktemp + local LAST_ERR=/tmp/tmp.mP8dY1w1b6 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Z1H9qObwVY secret/my-cluster-secrets-2 patched + cat /tmp/tmp.mP8dY1w1b6 + rm /tmp/tmp.Z1H9qObwVY /tmp/tmp.mP8dY1w1b6 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vKkEI6AlLd +++ mktemp ++ local LAST_ERR=/tmp/tmp.S391soHUA7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vKkEI6AlLd ++ cat /tmp/tmp.S391soHUA7 ++ rm /tmp/tmp.vKkEI6AlLd /tmp/tmp.S391soHUA7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
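The 'test new operator' step rotates the operator password inside the second Secret. Reduced to its essentials (both commands appear verbatim in the trace), patch_secret is base64-encode plus a single-key patch:

    # Rotate one key inside a Secret: Kubernetes stores Secret data
    # base64-encoded, so encode first, then patch just that key.
    newpass=test-password2
    enc=$(echo -n "$newpass" | base64)
    kubectl patch secret my-cluster-secrets-2 \
        -p="{\"data\":{\"operator\": \"$enc\"}}"

The dGVzdC1wYXNzd29yZDI= seen in the trace is exactly base64('test-password2').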
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xK2e1ZIkYW +++ mktemp ++ local LAST_ERR=/tmp/tmp.mAftezEaiP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xK2e1ZIkYW ++ cat /tmp/tmp.mAftezEaiP ++ rm /tmp/tmp.xK2e1ZIkYW /tmp/tmp.mAftezEaiP ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.p3YZyy7bAB +++ mktemp ++ local LAST_ERR=/tmp/tmp.RC87YR50lN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.p3YZyy7bAB ++ cat /tmp/tmp.RC87YR50lN ++ rm /tmp/tmp.p3YZyy7bAB /tmp/tmp.RC87YR50lN ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.bhURinvZHq ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.JQZQljU0HM +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.bhURinvZHq +++++ cat /tmp/tmp.JQZQljU0HM +++++ rm /tmp/tmp.bhURinvZHq /tmp/tmp.JQZQljU0HM +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.jQmOo0AquV ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.vY9a3X0ACL +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.jQmOo0AquV +++++ cat /tmp/tmp.vY9a3X0ACL +++++ rm /tmp/tmp.jQmOo0AquV /tmp/tmp.vY9a3X0ACL +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FwPnFyV8a7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.SZ3UILlDp4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FwPnFyV8a7 ++ cat /tmp/tmp.SZ3UILlDp4 ++ rm /tmp/tmp.FwPnFyV8a7 /tmp/tmp.SZ3UILlDp4 ++ return 0 + [[ 2 == \2 ]] + echo + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW 
TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SxCgbmKSQo +++ mktemp ++ local LAST_ERR=/tmp/tmp.SHHJ3RSjYc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SxCgbmKSQo ++ cat /tmp/tmp.SHHJ3RSjYc ++ rm /tmp/tmp.SxCgbmKSQo /tmp/tmp.SHHJ3RSjYc ++ return 0 + client_pod=pxc-client-545786ccbb-gfmwf + wait_pod pxc-client-545786ccbb-gfmwf + local pod=pxc-client-545786ccbb-gfmwf + local max_retry=480 + local ns= ++ echo pxc-client-545786ccbb-gfmwf ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-545786ccbb-gfmwf condition met waiting for pod/pxc-client-545786ccbb-gfmwf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.dqV03Eaoyh/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.dqV03Eaoyh/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4.sql /tmp/tmp.dqV03Eaoyh/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.rnvTy6EBYn +++ mktemp ++ local LAST_ERR=/tmp/tmp.7BlRUxgdX7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rnvTy6EBYn ++ cat /tmp/tmp.7BlRUxgdX7 ++ rm /tmp/tmp.rnvTy6EBYn /tmp/tmp.7BlRUxgdX7 ++ return 0 + newpass='}v2Omq~7db.?l9B1' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''}v2Omq~7db.?l9B1'\'';' '-h some-name-pxc -uroot -p'\''}v2Omq~7db.?l9B1'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''}v2Omq~7db.?l9B1'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''}v2Omq~7db.?l9B1'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZtxY679HpS +++ mktemp ++ local LAST_ERR=/tmp/tmp.c2cegDzdvV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZtxY679HpS ++ cat /tmp/tmp.c2cegDzdvV ++ rm /tmp/tmp.ZtxY679HpS /tmp/tmp.c2cegDzdvV ++ return 0 + client_pod=pxc-client-545786ccbb-gfmwf + wait_pod pxc-client-545786ccbb-gfmwf + local pod=pxc-client-545786ccbb-gfmwf + local max_retry=480 + local ns= ++ echo pxc-client-545786ccbb-gfmwf ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-545786ccbb-gfmwf condition met waiting for 
pod/pxc-client-545786ccbb-gfmwf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 80 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''}v2Omq~7db.?l9B1'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''}v2Omq~7db.?l9B1'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''}v2Omq~7db.?l9B1'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''}v2Omq~7db.?l9B1'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tq2HZTmTWI +++ mktemp ++ local LAST_ERR=/tmp/tmp.TcigHHm7d4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tq2HZTmTWI ++ cat /tmp/tmp.TcigHHm7d4 ++ rm /tmp/tmp.tq2HZTmTWI /tmp/tmp.TcigHHm7d4 ++ return 0 + client_pod=pxc-client-545786ccbb-gfmwf + wait_pod pxc-client-545786ccbb-gfmwf + local pod=pxc-client-545786ccbb-gfmwf + local max_retry=480 + local ns= ++ echo pxc-client-545786ccbb-gfmwf ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-545786ccbb-gfmwf condition met waiting for pod/pxc-client-545786ccbb-gfmwf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.dqV03Eaoyh/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.dqV03Eaoyh/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4.sql /tmp/tmp.dqV03Eaoyh/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.NOPhVHgCZ4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.vvWrazMlcn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NOPhVHgCZ4 ++ cat /tmp/tmp.vvWrazMlcn ++ rm /tmp/tmp.NOPhVHgCZ4 /tmp/tmp.vvWrazMlcn ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.QWP3kB7Wom ++ mktemp + local LAST_ERR=/tmp/tmp.gwFpEXsOw9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QWP3kB7Wom secret/my-cluster-secrets-2 configured + cat /tmp/tmp.gwFpEXsOw9 Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
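getSecretData, used twice in this stretch (my-cluster-secrets-2/root, then internal-some-name/operator), reads a single key back out of a Secret. Its core, as the trace shows, is a Go template plus base64 --decode:

    # Read one key from a Secret and decode it. The operator mirrors user
    # passwords into the internal-<cluster> Secret, which is what lets the
    # test confirm 'operator' is still test-password2 after the re-apply.
    getSecretData() {
        local secretName=$1
        local dataKey=$2
        kubectl get "secrets/$secretName" \
            --template="{{.data.$dataKey}}" | base64 --decode
    }
    pass=$(getSecretData internal-some-name operator)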
+ rm /tmp/tmp.QWP3kB7Wom /tmp/tmp.gwFpEXsOw9 + return 0 + sleep 60 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oaKwo6ZnS1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.VOSsLM48NY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oaKwo6ZnS1 ++ cat /tmp/tmp.VOSsLM48NY ++ rm /tmp/tmp.oaKwo6ZnS1 /tmp/tmp.VOSsLM48NY ++ return 0 + client_pod=pxc-client-545786ccbb-gfmwf + wait_pod pxc-client-545786ccbb-gfmwf + local pod=pxc-client-545786ccbb-gfmwf + local max_retry=480 + local ns= ++ echo pxc-client-545786ccbb-gfmwf ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-545786ccbb-gfmwf condition met waiting for pod/pxc-client-545786ccbb-gfmwf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.dqV03Eaoyh/select-4.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.dqV03Eaoyh/select-4.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-4.sql /tmp/tmp.dqV03Eaoyh/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/conf/some-name.yml + local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/conf/some-name.yml + local pvc_name= + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/conf/some-name.yml '' + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + local pvc_name= + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_OUT=/tmp/tmp.xxQpAHaFQy + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + local LAST_ERR=/tmp/tmp.s7l2jivFE3 + local exit_status=0 + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2325-d401e0a9#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-31794~ ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xxQpAHaFQy perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.s7l2jivFE3 + rm /tmp/tmp.xxQpAHaFQy /tmp/tmp.s7l2jivFE3 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0txuQjS13L +++ mktemp ++ local LAST_ERR=/tmp/tmp.WNo4qHASUg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0txuQjS13L ++ cat /tmp/tmp.WNo4qHASUg ++ rm /tmp/tmp.0txuQjS13L /tmp/tmp.WNo4qHASUg ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
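apply_config above pipes the CR manifest through a stack of sed substitutions that pin every image field to the build under test before handing it to kubectl apply -f -. A condensed sketch with three of the rules copied from the trace (the full helper chains about a dozen in the same style):

    # Rewrite the manifest on the fly so the CR references this CI run's
    # images, then apply it.
    cat e2e-tests/users/conf/some-name.yml \
        | sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
        | sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' \
        | sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2325-d401e0a9#' \
        | kubectl apply -f -

Because this apply changes the cluster spec (wait_cluster_consistency is now called with a proxy size of 3 instead of 2), the pods roll and the long initializing stretch below is expected.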
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.f1gCEkFPsF +++ mktemp ++ local LAST_ERR=/tmp/tmp.vlFDtt7ltw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.f1gCEkFPsF ++ cat /tmp/tmp.vlFDtt7ltw ++ rm /tmp/tmp.f1gCEkFPsF /tmp/tmp.vlFDtt7ltw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.L5jPFfHWdp +++ mktemp ++ local LAST_ERR=/tmp/tmp.5emQbKQRBU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.L5jPFfHWdp ++ cat /tmp/tmp.5emQbKQRBU ++ rm /tmp/tmp.L5jPFfHWdp /tmp/tmp.5emQbKQRBU ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Cllyw0jQXp +++ mktemp ++ local LAST_ERR=/tmp/tmp.sqBYhcrS2a ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Cllyw0jQXp ++ cat /tmp/tmp.sqBYhcrS2a ++ rm /tmp/tmp.Cllyw0jQXp /tmp/tmp.sqBYhcrS2a ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hc39FYRTSV +++ mktemp ++ local LAST_ERR=/tmp/tmp.VjoyEEVoJ5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hc39FYRTSV ++ cat /tmp/tmp.VjoyEEVoJ5 ++ rm /tmp/tmp.hc39FYRTSV /tmp/tmp.VjoyEEVoJ5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7CHxAXxIAp +++ mktemp ++ local LAST_ERR=/tmp/tmp.Qm3RGG5WSq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7CHxAXxIAp ++ cat /tmp/tmp.Qm3RGG5WSq ++ rm /tmp/tmp.7CHxAXxIAp /tmp/tmp.Qm3RGG5WSq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XEXOsqQaPw +++ mktemp ++ local LAST_ERR=/tmp/tmp.pnYSzKj7Tf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XEXOsqQaPw ++ cat /tmp/tmp.pnYSzKj7Tf ++ rm /tmp/tmp.XEXOsqQaPw /tmp/tmp.pnYSzKj7Tf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JkdTmUrgfc +++ mktemp ++ local LAST_ERR=/tmp/tmp.uNgdXo6gSH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JkdTmUrgfc ++ cat /tmp/tmp.uNgdXo6gSH ++ rm /tmp/tmp.JkdTmUrgfc /tmp/tmp.uNgdXo6gSH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wAvUniyiFF +++ mktemp ++ local LAST_ERR=/tmp/tmp.vXZSIjad8t ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wAvUniyiFF ++ cat /tmp/tmp.vXZSIjad8t ++ rm /tmp/tmp.wAvUniyiFF /tmp/tmp.vXZSIjad8t ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xKYLu96BI9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.bx1D8MvwfG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xKYLu96BI9 ++ cat /tmp/tmp.bx1D8MvwfG ++ rm /tmp/tmp.xKYLu96BI9 /tmp/tmp.bx1D8MvwfG ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.96owbezC1U +++ mktemp ++ local LAST_ERR=/tmp/tmp.saRG4Y9weQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.96owbezC1U ++ cat /tmp/tmp.saRG4Y9weQ ++ rm /tmp/tmp.96owbezC1U /tmp/tmp.saRG4Y9weQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MismQewOj6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.N6ZxVUicNz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MismQewOj6 ++ cat /tmp/tmp.N6ZxVUicNz ++ rm /tmp/tmp.MismQewOj6 /tmp/tmp.N6ZxVUicNz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wUfEwB5BXk +++ mktemp ++ local LAST_ERR=/tmp/tmp.TmHprIwNKx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wUfEwB5BXk ++ cat /tmp/tmp.TmHprIwNKx ++ rm /tmp/tmp.wUfEwB5BXk /tmp/tmp.TmHprIwNKx ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qKbz04Gy12 +++ mktemp ++ local LAST_ERR=/tmp/tmp.XniJuT90Pk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qKbz04Gy12 ++ cat /tmp/tmp.XniJuT90Pk ++ rm /tmp/tmp.qKbz04Gy12 /tmp/tmp.XniJuT90Pk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.m0SLxtGXux +++ mktemp ++ local LAST_ERR=/tmp/tmp.4MbqogdPMs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.m0SLxtGXux ++ cat /tmp/tmp.4MbqogdPMs ++ rm /tmp/tmp.m0SLxtGXux /tmp/tmp.4MbqogdPMs ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2dcbfeYklI +++ mktemp ++ local LAST_ERR=/tmp/tmp.MDaMAOajrt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2dcbfeYklI ++ cat /tmp/tmp.MDaMAOajrt ++ rm /tmp/tmp.2dcbfeYklI /tmp/tmp.MDaMAOajrt ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5ju1ePhIuV +++ mktemp ++ local LAST_ERR=/tmp/tmp.XHRUSBPcnh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5ju1ePhIuV ++ cat /tmp/tmp.XHRUSBPcnh ++ rm /tmp/tmp.5ju1ePhIuV /tmp/tmp.XHRUSBPcnh ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gCRKbEbeiu +++ mktemp ++ local LAST_ERR=/tmp/tmp.DxqBiVq8Gq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gCRKbEbeiu ++ cat /tmp/tmp.DxqBiVq8Gq ++ rm /tmp/tmp.gCRKbEbeiu /tmp/tmp.DxqBiVq8Gq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AMBoC0EQZU +++ mktemp ++ local LAST_ERR=/tmp/tmp.f44WSwhNuv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AMBoC0EQZU ++ cat /tmp/tmp.f44WSwhNuv ++ rm /tmp/tmp.AMBoC0EQZU /tmp/tmp.f44WSwhNuv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4Dl1n2X2Ud +++ mktemp ++ local LAST_ERR=/tmp/tmp.VgovBrzN5Y ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4Dl1n2X2Ud ++ cat /tmp/tmp.VgovBrzN5Y ++ rm /tmp/tmp.4Dl1n2X2Ud /tmp/tmp.VgovBrzN5Y ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7lJx2QgZF4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Cv99E5Ecvm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7lJx2QgZF4 ++ cat /tmp/tmp.Cv99E5Ecvm ++ rm /tmp/tmp.7lJx2QgZF4 /tmp/tmp.Cv99E5Ecvm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.17CI89rMXC +++ mktemp ++ local LAST_ERR=/tmp/tmp.bbJW9IRvM9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.17CI89rMXC ++ cat /tmp/tmp.bbJW9IRvM9 ++ rm /tmp/tmp.17CI89rMXC /tmp/tmp.bbJW9IRvM9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZfsP2tNKEu +++ mktemp ++ local LAST_ERR=/tmp/tmp.53QEyWrwWx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZfsP2tNKEu ++ cat /tmp/tmp.53QEyWrwWx ++ rm /tmp/tmp.ZfsP2tNKEu /tmp/tmp.53QEyWrwWx ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 22 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YAouoYHHg2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.RZMxPBOrRV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YAouoYHHg2 ++ cat /tmp/tmp.RZMxPBOrRV ++ rm /tmp/tmp.YAouoYHHg2 /tmp/tmp.RZMxPBOrRV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 23 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jfEDRrN1W5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.gnZ4DBmlwN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jfEDRrN1W5 ++ cat /tmp/tmp.gnZ4DBmlwN ++ rm /tmp/tmp.jfEDRrN1W5 /tmp/tmp.gnZ4DBmlwN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 24 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fKe0DIKN1y +++ mktemp ++ local LAST_ERR=/tmp/tmp.E5qMvWR8BE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fKe0DIKN1y ++ cat /tmp/tmp.E5qMvWR8BE ++ rm /tmp/tmp.fKe0DIKN1y /tmp/tmp.E5qMvWR8BE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n .
[iterations 25-52 omitted: the same 5-second poll of .status.state repeated 28 more times, differing only in the loop counter and mktemp paths; the state stayed "initializing" throughout]
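The block above is the suite's readiness gate: it polls the custom resource's .status.state every five seconds and only proceeds once the operator reports "ready", bailing out after 300 attempts. Stripped of the kubectl_bin retry/mktemp plumbing that dominates the trace, the loop is essentially the following (a minimal sketch reconstructed from the xtrace output, not the verbatim e2e-tests helper; the real wait_cluster_consistency also checks .status.pxc.ready and the proxy replica count, as the trace further down shows):

    # Sketch: poll a PXC custom resource until its state becomes "ready".
    wait_until_ready() {
        local cluster=$1
        local i=0 max=300
        until [[ "$(kubectl get pxc "$cluster" -o 'jsonpath={.status.state}')" == "ready" ]]; do
            echo -n .    # progress dot, mirrors the dots in this log
            sleep 5
            [[ $i -ge $max ]] && { echo "timeout waiting for pxc/$cluster"; return 1; }
            let i+=1
        done
    }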
.+ sleep 5 + [[ 53 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BzdBZLvVvd +++ mktemp ++ local LAST_ERR=/tmp/tmp.UiGnKs62tf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BzdBZLvVvd ++ cat /tmp/tmp.UiGnKs62tf ++ rm /tmp/tmp.BzdBZLvVvd /tmp/tmp.UiGnKs62tf ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jxWnsEFIW5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Q7F6tG0Y0R ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jxWnsEFIW5 ++ cat /tmp/tmp.Q7F6tG0Y0R ++ rm /tmp/tmp.jxWnsEFIW5 /tmp/tmp.Q7F6tG0Y0R ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.pnKb3wAtaW ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.K90hwtqXVt +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.pnKb3wAtaW +++++ cat /tmp/tmp.K90hwtqXVt +++++ rm /tmp/tmp.pnKb3wAtaW /tmp/tmp.K90hwtqXVt +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZwlrTIPM6q +++ mktemp ++ local LAST_ERR=/tmp/tmp.E3qWuZ7SgM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZwlrTIPM6q ++ cat /tmp/tmp.E3qWuZ7SgM ++ rm /tmp/tmp.ZwlrTIPM6q /tmp/tmp.E3qWuZ7SgM ++ return 0 + [[ 3 == \3 ]] + echo + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + check_generation 2 haproxy some-name + local generation=2 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IOAK11MoDH +++ mktemp ++ local LAST_ERR=/tmp/tmp.LxX3YWvAED ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IOAK11MoDH ++ cat /tmp/tmp.LxX3YWvAED ++ rm /tmp/tmp.IOAK11MoDH /tmp/tmp.LxX3YWvAED ++ return 0 + current_generation=2 + [[ 2 != \2 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.MkLB080KYq ++ mktemp + local LAST_ERR=/tmp/tmp.mFzlpgJqkH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 
+ set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.MkLB080KYq secret/my-cluster-secrets patched + cat /tmp/tmp.mFzlpgJqkH + rm /tmp/tmp.MkLB080KYq /tmp/tmp.mFzlpgJqkH + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1zAFpcnVjS +++ mktemp ++ local LAST_ERR=/tmp/tmp.Oi4FohFJD0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1zAFpcnVjS ++ cat /tmp/tmp.Oi4FohFJD0 ++ rm /tmp/tmp.1zAFpcnVjS /tmp/tmp.Oi4FohFJD0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n .
[iterations 0-9 omitted: the same 5-second poll of .status.state repeated ten times, each returning "initializing"]
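Before this wait started, the trace showed patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= rotating the monitor password (the value is base64 for test-password2, the password used for the SHOW DATABASES check below). Reduced to its core, that helper is a one-key Secret patch (a sketch inferred from the trace; the real helper routes through the kubectl_bin retry wrapper):

    # Sketch: overwrite one key of a Secret with a base64-encoded value.
    patch_secret() {
        local secret=$1 key=$2 value=$3
        kubectl patch secret "$secret" -p="{\"data\":{\"$key\": \"$value\"}}"
    }
    # Usage matching this log:
    # patch_secret my-cluster-secrets monitor "$(echo -n 'test-password2' | base64)"

Because only data.monitor changes, the operator detects the new secret hash, updates the internal user, and rolls the affected pods, which is why the cluster dropped back to "initializing" and this second wait was needed.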
.+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bUjs79uyQS +++ mktemp ++ local LAST_ERR=/tmp/tmp.q91GhIREw1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bUjs79uyQS ++ cat /tmp/tmp.q91GhIREw1 ++ rm /tmp/tmp.bUjs79uyQS /tmp/tmp.q91GhIREw1 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OLTc9pQtHv +++ mktemp ++ local LAST_ERR=/tmp/tmp.p0ghvrjTM2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OLTc9pQtHv ++ cat /tmp/tmp.p0ghvrjTM2 ++ rm /tmp/tmp.OLTc9pQtHv /tmp/tmp.p0ghvrjTM2 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.dtMa4Cbyvr ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.1D67b3rEei +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.dtMa4Cbyvr +++++ cat /tmp/tmp.1D67b3rEei +++++ rm /tmp/tmp.dtMa4Cbyvr /tmp/tmp.1D67b3rEei +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MqxI6CMqoO +++ mktemp ++ local LAST_ERR=/tmp/tmp.5ZlxjG0NTV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MqxI6CMqoO ++ cat /tmp/tmp.5ZlxjG0NTV ++ rm /tmp/tmp.MqxI6CMqoO /tmp/tmp.5ZlxjG0NTV ++ return 0 + [[ 3 == \3 ]] + echo + sleep 15 + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-3-57.sql ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ilDdVrWzi9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.pLx8yxzg1x ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set 
-e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ilDdVrWzi9 ++ cat /tmp/tmp.pLx8yxzg1x ++ rm /tmp/tmp.ilDdVrWzi9 /tmp/tmp.pLx8yxzg1x ++ return 0 + client_pod=pxc-client-545786ccbb-gfmwf + wait_pod pxc-client-545786ccbb-gfmwf + local pod=pxc-client-545786ccbb-gfmwf + local max_retry=480 + local ns= ++ echo pxc-client-545786ccbb-gfmwf ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-545786ccbb-gfmwf condition met waiting for pod/pxc-client-545786ccbb-gfmwf to become Ready
Defaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.dqV03Eaoyh/select-3.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.dqV03Eaoyh/select-3.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2325/e2e-tests/users/compare/select-3.sql /tmp/tmp.dqV03Eaoyh/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + check_generation 3 haproxy some-name + local generation=3 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ug6rHQNaL9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.AfC1NzIkq1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ug6rHQNaL9 ++ cat /tmp/tmp.AfC1NzIkq1 ++ rm /tmp/tmp.Ug6rHQNaL9 /tmp/tmp.AfC1NzIkq1 ++ return 0 + current_generation=3 + [[ 3 != \3 ]]
+ destroy users-31794 + local namespace=users-31794 + local ignore_logs=true + [[ 0 == 1 ]] + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v 'the object has been modified' ++ get_operator_pod + grep -v 'get backup status: Job.batch' ++ local label_prefix=app.kubernetes.io/ + grep -v level=info + sort -u + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + tee /tmp/tmp.dqV03Eaoyh/operator.log +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator ++ jq -r '.items[] | select(.metadata.deletionTimestamp == null) | .metadata.name' ++ head -1 +++ mktemp ++ local LAST_OUT=/tmp/tmp.1ssTk2fU4r +++ mktemp ++ local LAST_ERR=/tmp/tmp.rmTikASof3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1ssTk2fU4r ++ cat /tmp/tmp.rmTikASof3 ++ rm /tmp/tmp.1ssTk2fU4r /tmp/tmp.rmTikASof3 ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-6dc5fb9844-zsx8l ++ mktemp + local LAST_OUT=/tmp/tmp.2GPewwV3uX ++ mktemp + local LAST_ERR=/tmp/tmp.Aox6HcRFIx + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-6dc5fb9844-zsx8l + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2GPewwV3uX + cat /tmp/tmp.Aox6HcRFIx + rm /tmp/tmp.2GPewwV3uX /tmp/tmp.Aox6HcRFIx + return 0
[filtered object-diff fragments omitted: after the grep/sed/sort filters above, only stray braces, +/- markers, two identical hash lines ("0f7b5156c360408f5acb1ef5cbc55ffef4962ad03c3d2d51651607e1d044ca1e"), and "// 16 identical fields" notes survived from a StatefulSet diff in the operator log]
2025-12-22T10:46:06.834Z INFO setup Feature gates {"PXCO_FEATURE_GATES": "", "enabled": ""} 2025-12-22T10:46:06.834Z INFO setup Manager starting up {"gitCommit": "d401e0a9be96f8454c90bf9a8ce22966e6d400ef", "gitBranch": "PR-2325-d401e0a9", "buildTime": "2025-12-22T07:39:29Z", "goVersion": "go1.25.5", "os": "linux", "arch": "amd64"} 2025-12-22T10:46:06.834Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.14-gke.1081000"} 2025-12-22T10:46:06.837Z INFO setup Registering Components. 2025-12-22T10:46:07.533Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-12-22T10:46:07.533Z INFO controller-runtime.metrics Starting metrics server 2025-12-22T10:46:07.533Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2025-12-22T10:46:07.533Z INFO controller-runtime.webhook Starting webhook server 2025-12-22T10:46:07.533Z INFO setup Starting the Cmd. 2025-12-22T10:46:07.533Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-12-22T10:46:07.534Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2025-12-22T10:46:07.534Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-12-22T10:46:07.534Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-12-22T10:46:07.635Z INFO Attempting to acquire leader lease...
{"lock": "pxc-operator/08db1feb.percona.com"} 2025-12-22T10:46:07.691Z DEBUG events percona-xtradb-cluster-operator-6dc5fb9844-zsx8l_4e269172-b964-412c-aad7-326098b41593 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"93a80377-cc52-4c3a-bb45-d24b12c1ad0e","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1766400367682303009"}, "reason": "LeaderElection"} 2025-12-22T10:46:07.691Z INFO Starting EventSource {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-12-22T10:46:07.691Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-12-22T10:46:07.691Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.Secret"} 2025-12-22T10:46:07.691Z INFO Starting EventSource {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-12-22T10:46:07.691Z INFO Successfully acquired lease {"lock": "pxc-operator/08db1feb.percona.com"} 2025-12-22T10:46:07.792Z INFO Starting Controller {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup"} 2025-12-22T10:46:07.792Z INFO Starting Controller {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster"} 2025-12-22T10:46:07.792Z INFO Starting Controller {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore"} 2025-12-22T10:46:07.792Z INFO Starting workers {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "worker count": 1} 2025-12-22T10:46:07.792Z INFO Starting workers {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "worker count": 1} 2025-12-22T10:46:07.792Z INFO Starting workers {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "worker count": 1} 2025-12-22T10:46:47.515Z INFO Set CR version {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "6f8ae31a-c560-4c2c-8d2d-1e6cf4a90c31", "version": "1.19.0"} 2025-12-22T10:46:47.661Z INFO User secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "6f8ae31a-c560-4c2c-8d2d-1e6cf4a90c31", "secrets": "my-cluster-secrets"} 2025-12-22T10:46:47.876Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": 
"6f8ae31a-c560-4c2c-8d2d-1e6cf4a90c31", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-12-22T10:46:47.997Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "6f8ae31a-c560-4c2c-8d2d-1e6cf4a90c31", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-12-22T10:46:48.024Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "6f8ae31a-c560-4c2c-8d2d-1e6cf4a90c31", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-12-22T10:46:48.061Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "6f8ae31a-c560-4c2c-8d2d-1e6cf4a90c31", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-22T10:46:48.082Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "6f8ae31a-c560-4c2c-8d2d-1e6cf4a90c31", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-22T10:46:48.108Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "6f8ae31a-c560-4c2c-8d2d-1e6cf4a90c31", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-22T10:46:48.188Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "6f8ae31a-c560-4c2c-8d2d-1e6cf4a90c31", "object": "some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-22T10:46:48.972Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "b8c1556c-75c3-4edd-bdf7-ef57197343f7", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-12-22T10:46:48.988Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "b8c1556c-75c3-4edd-bdf7-ef57197343f7", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-12-22T10:48:05.182Z INFO 
Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "25c5fb6b-1550-4554-ace3-05ee7f66fede", "user": "operator"} 2025-12-22T10:48:05.225Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "25c5fb6b-1550-4554-ace3-05ee7f66fede", "user": "monitor"} 2025-12-22T10:48:05.257Z INFO User monitor: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "25c5fb6b-1550-4554-ace3-05ee7f66fede"} 2025-12-22T10:48:05.288Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "25c5fb6b-1550-4554-ace3-05ee7f66fede", "user": "xtrabackup"} 2025-12-22T10:48:05.319Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "25c5fb6b-1550-4554-ace3-05ee7f66fede"} 2025-12-22T10:48:05.327Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "25c5fb6b-1550-4554-ace3-05ee7f66fede", "err": "get primary pxc pod: not found"} 2025-12-22T10:48:10.152Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "d006a62c-f086-4606-8848-dc398773aac5", "err": "get primary pxc pod: not found"} 2025-12-22T10:48:15.311Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "105a3cb8-5fd1-4d5b-a55e-9e3109b01163", "err": "get primary pxc pod: not found"} 2025-12-22T10:48:20.441Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "7bcb31e5-56c7-4590-a0b2-ea3107b238fb", "err": "get primary pxc pod: not found"} 2025-12-22T10:50:30.752Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "4277c7f5-b437-44c0-bbe3-88b62fc529d7", "user": "root"} 2025-12-22T10:50:30.795Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "4277c7f5-b437-44c0-bbe3-88b62fc529d7", "user": "replication"} 2025-12-22T10:50:30.843Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "4277c7f5-b437-44c0-bbe3-88b62fc529d7", "new version": "5.7.44-48-57"} 2025-12-22T10:50:32.966Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "4277c7f5-b437-44c0-bbe3-88b62fc529d7"} 2025-12-22T10:50:37.472Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "7e79de24-1d31-4844-8a6b-469b1ee2fa1a"} 2025-12-22T10:50:42.656Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "d962972f-99f2-4451-97ad-827a6489ccae"} 2025-12-22T10:50:47.982Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "cfc4d63a-58c2-4576-a307-64ace0113985"} 2025-12-22T10:50:53.088Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "b2835975-ccef-45b7-999a-f91bebbdbd55"} 2025-12-22T10:50:58.368Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "b9bcf9c0-31f7-4d9c-9d1e-d1e3083f0000"} 2025-12-22T10:51:03.384Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "5f482e52-433d-4bb0-8650-c9e29abfe072"} 2025-12-22T10:51:08.691Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": 
"PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "4d94967e-ab18-44ed-a92d-d669e079cdf1"} 2025-12-22T10:51:13.998Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "acaefb57-c258-4f85-9472-d9f8b13010f8"} 2025-12-22T10:51:19.303Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "d0365d04-c997-4c8b-85d4-c562b93713c2"} 2025-12-22T10:51:24.557Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "e6dd2028-a5cc-437a-b3c8-122943deb627"} 2025-12-22T10:51:29.858Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "7dcd38b2-433a-4c64-a1dd-48bbb7dabb1d"} 2025-12-22T10:51:34.862Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "7cf28ec8-ddac-451b-9499-df78bad8cd47"} 2025-12-22T10:51:40.201Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "1e4a4a04-4a38-4ea4-9375-a8edd2edefa0"} 2025-12-22T10:51:45.508Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "e463a31e-652c-4f4d-be86-27ba110d97aa"} 2025-12-22T10:51:50.666Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "6b1335a4-e75f-4a2f-9d1e-1235061e9d07"} 2025-12-22T10:51:55.864Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "98968881-26ca-4374-9b89-e6ac9610f09f"} 2025-12-22T10:52:01.097Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", 
"PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "7b1ae76a-39c0-4195-8fae-e52659d5178e"} 2025-12-22T10:52:06.278Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "ea62338d-78e0-4096-87df-6b6afc866217", "user": "root"} 2025-12-22T10:52:06.294Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "ea62338d-78e0-4096-87df-6b6afc866217", "user": "root"} 2025-12-22T10:52:06.311Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "c2dae8ce-77b0-4dd7-bb80-cbf6a2af3277"} 2025-12-22T10:52:06.322Z INFO MySQL init secret created {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "ea62338d-78e0-4096-87df-6b6afc866217", "secret": "some-name-mysql-init", "user": "root"} 2025-12-22T10:52:08.715Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "ea62338d-78e0-4096-87df-6b6afc866217"} 2025-12-22T10:52:08.734Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "ea62338d-78e0-4096-87df-6b6afc866217", "user": "root"} 2025-12-22T10:52:10.486Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "ea62338d-78e0-4096-87df-6b6afc866217"} 2025-12-22T10:52:16.886Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "d84c3846-acd1-4943-92ba-c733d2b22a61"} 2025-12-22T10:52:21.963Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "e11ce895-b14a-4b6a-b38f-69e068feddb9"} 2025-12-22T10:52:27.168Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", 
"controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "b8e1f8c3-13f2-4372-9d64-7608bb14c7c0"} 2025-12-22T10:52:32.190Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "deec2100-1408-4cbf-8e5f-b4345c711fd7"} 2025-12-22T10:52:37.757Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "46dc8e95-366a-4564-8840-9e35e1ae9a45"} 2025-12-22T10:52:42.697Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "39418c5f-b79a-45c5-9bd8-70c701fd5753"} 2025-12-22T10:52:45.605Z INFO controller-runtime.cache Warning: watch ended with error {"reflector": "pkg/mod/k8s.io/client-go@v0.35.0/tools/cache/reflector.go:289", "type": "*v1.Event", "err": "very short watch: pkg/mod/k8s.io/client-go@v0.35.0/tools/cache/reflector.go:289: Unexpected watch close - watch lasted less than a second and no items received"} 2025-12-22T10:52:48.191Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "dcc060fe-34ab-4623-a041-17e2ce49c8f2"} 2025-12-22T10:52:50.745Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "26a4e794-17e7-4b61-b96b-ec64f8e04b62", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-22T10:52:50.793Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "26a4e794-17e7-4b61-b96b-ec64f8e04b62", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-22T10:52:53.026Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "26a4e794-17e7-4b61-b96b-ec64f8e04b62", "error": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / 
\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-22T10:53:15.925Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "a82b9ef6-f882-4bf4-a32d-219b07b2acf1", "user": "proxyadmin"} 2025-12-22T10:53:15.925Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "a82b9ef6-f882-4bf4-a32d-219b07b2acf1", "user": "proxyadmin"} 2025-12-22T10:53:15.955Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "a82b9ef6-f882-4bf4-a32d-219b07b2acf1", "user": "proxyadmin"} 2025-12-22T10:53:15.984Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "a82b9ef6-f882-4bf4-a32d-219b07b2acf1", "user": "proxyadmin"} 2025-12-22T10:53:15.984Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "a82b9ef6-f882-4bf4-a32d-219b07b2acf1", "last-applied-secret": "56d0a69e7f7738bb3188a773b0f608ca903a708f7ecf84f000622bfdf2a94962"} 2025-12-22T10:53:15.988Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "a82b9ef6-f882-4bf4-a32d-219b07b2acf1", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-22T10:53:17.313Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "156d6b1b-b22a-4875-9deb-64cec5f500f7", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in 
ProxySQL\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-22T10:54:03.290Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "ab22e72c-ebd1-4736-8e28-cf32a973e885"} 2025-12-22T10:54:07.917Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "35a073ec-65a2-43c2-a443-89f0c3d6aab9"} 2025-12-22T10:54:13.168Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "abd7ffe7-878e-41ab-897f-a92feaa725f0"} 2025-12-22T10:54:18.818Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "5daa115d-e2b2-4f21-aacd-f8143203069c"} 2025-12-22T10:54:22.748Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "884806a1-f2ad-4a29-8c4e-9e39df010872", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-22T10:54:22.785Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", 
"name": "some-name", "reconcileID": "884806a1-f2ad-4a29-8c4e-9e39df010872", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-22T10:54:23.812Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "3b755893-0a30-4ef3-baea-aafadfa69a11", "error": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-2\" not found / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-2\" not found / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-22T10:54:29.348Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "30fad44a-7964-4759-a225-90067c75d691", "user": "xtrabackup"} 2025-12-22T10:54:29.359Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "30fad44a-7964-4759-a225-90067c75d691", "user": "xtrabackup"} 2025-12-22T10:54:29.379Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "30fad44a-7964-4759-a225-90067c75d691", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-12-22T10:54:29.396Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "30fad44a-7964-4759-a225-90067c75d691", "user": "xtrabackup"} 2025-12-22T10:54:29.396Z INFO PXC pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "30fad44a-7964-4759-a225-90067c75d691", "last-applied-secret": "e327b0e0f90b7eecf8672b6a319ef9b2c31d148b4beb3cbb5b0d7f1146f35e22"} 2025-12-22T10:54:29.399Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": 
"30fad44a-7964-4759-a225-90067c75d691", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-22T10:54:32.136Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "30fad44a-7964-4759-a225-90067c75d691"} 2025-12-22T10:54:36.232Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "e958a32d-3dc3-4f4f-82a7-01a42691254a"} 2025-12-22T10:56:14.157Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "125bbd7a-ea7a-4bfb-a5e5-0b76c93668e4", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.39.65.58:33062: connect: connection refused"} 2025-12-22T10:56:19.310Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "1acb0261-7e2b-4ef0-8391-a225e3df2680", "primary name": "some-name-pxc-0.some-name-pxc.users-31794.svc.cluster.local"} 2025-12-22T10:56:24.438Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "3b9cc1a8-61fe-41ee-9e19-e4408ddb725e", "primary name": "some-name-pxc-0.some-name-pxc.users-31794.svc.cluster.local"} 2025-12-22T10:56:29.568Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "fb0ade31-2b27-48ac-bad4-615af353cb02", "primary name": "some-name-pxc-0.some-name-pxc.users-31794.svc.cluster.local"} 2025-12-22T10:56:34.722Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "4397abed-1d70-4179-927e-0ed6ba33119a", "primary name": "some-name-pxc-0.some-name-pxc.users-31794.svc.cluster.local"} 2025-12-22T10:56:39.849Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "87f8dbbc-03da-4fb7-9f8c-f839fae6e096", "primary name": "some-name-pxc-0.some-name-pxc.users-31794.svc.cluster.local"} 2025-12-22T10:56:44.976Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "90f90467-2296-4493-8c9f-6475e8cf71b2", "primary name": "some-name-pxc-0.some-name-pxc.users-31794.svc.cluster.local"} 2025-12-22T10:56:50.115Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "dbb16383-1ace-45e0-802f-05a1140799cb", "primary name": "some-name-pxc-0.some-name-pxc.users-31794.svc.cluster.local"} 2025-12-22T10:56:57.770Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "899756f6-73d0-4398-a92a-384f7a5689ff"} 2025-12-22T10:57:02.934Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "7c9cdf2b-b288-4163-a331-6b263e9253d1"} 2025-12-22T10:57:08.020Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "ef8df7c8-58fe-46b5-b89f-3cf732e0f50b"} 2025-12-22T10:57:13.330Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "32e5c194-b536-43f4-bbe7-7309fb508c8b"} 2025-12-22T10:57:14.876Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "50538886-95d9-4b30-8285-6e376f685506", "user": "monitor"} 2025-12-22T10:57:14.887Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "50538886-95d9-4b30-8285-6e376f685506", "user": "monitor"} 2025-12-22T10:57:14.910Z INFO MySQL init secret updated 
{"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "50538886-95d9-4b30-8285-6e376f685506", "secret": "some-name-mysql-init", "user": "monitor"} 2025-12-22T10:57:14.930Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "50538886-95d9-4b30-8285-6e376f685506", "user": "monitor"} 2025-12-22T10:57:15.019Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "50538886-95d9-4b30-8285-6e376f685506", "user": "monitor"} 2025-12-22T10:57:15.019Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "50538886-95d9-4b30-8285-6e376f685506", "last-applied-secret": "dca36778af6416ade691623ecacf361b1696a4a5d953686bc92619ce815601e5"} 2025-12-22T10:57:15.023Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "50538886-95d9-4b30-8285-6e376f685506", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-22T10:57:18.063Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "50538886-95d9-4b30-8285-6e376f685506", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-22T10:57:52.530Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "6cf3f35b-165e-4397-a173-f5df3db70039"} 2025-12-22T10:57:57.275Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", 
"controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "0cca58e9-fd33-4335-bd67-f324bce61293"} 2025-12-22T10:58:02.850Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "b7edf0a2-af32-41ae-bc68-0da78fe48680"} 2025-12-22T10:58:07.871Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "dbb81ba7-2fb2-4252-b073-808794e1695d"} 2025-12-22T10:58:13.166Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "8a0d68c0-96af-4615-bfbd-900b1ba7bb39"} 2025-12-22T10:58:15.764Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "7513ef04-de50-4508-a08b-2f4d5192e963", "user": "operator"} 2025-12-22T10:58:15.779Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "7513ef04-de50-4508-a08b-2f4d5192e963", "user": "operator"} 2025-12-22T10:58:15.807Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "7513ef04-de50-4508-a08b-2f4d5192e963", "secret": "some-name-mysql-init", "user": "operator"} 2025-12-22T10:58:15.839Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "7513ef04-de50-4508-a08b-2f4d5192e963", "user": "operator"} 2025-12-22T10:58:15.839Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "7513ef04-de50-4508-a08b-2f4d5192e963", "last-applied-secret": "830f92a4fb43c8abae77c299f6240f5ccc5452ba59ead38f4165beb7d6592819"} 2025-12-22T10:58:15.843Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": 
"7513ef04-de50-4508-a08b-2f4d5192e963", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-22T10:58:19.802Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "7513ef04-de50-4508-a08b-2f4d5192e963", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' 
(using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31794.svc.' 
(using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-22T10:58:39.153Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "8a5213e0-3994-4c2b-ae3e-58465c8855ef"} 2025-12-22T10:58:43.713Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "cdd3342e-aa77-4a60-a872-100287ffac0b"} 2025-12-22T10:58:49.115Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "c793458e-03db-4e7a-8df3-d6ef4d3d6093"} 2025-12-22T10:58:54.353Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "6c0fbb86-6641-495c-8128-236ca5f7eab2"} 2025-12-22T10:58:58.790Z INFO Created user secrets {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "secrets": "my-cluster-secrets-2"} 2025-12-22T10:58:58.790Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "user": "root"} 2025-12-22T10:58:58.806Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "user": "root"} 2025-12-22T10:58:58.824Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "secret": "some-name-mysql-init", "user": "root"} 2025-12-22T10:58:59.961Z DEBUG PXC users synced 
with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "f7aa2c70-0eeb-4954-ae29-ee6c4e6e0246"} 2025-12-22T10:59:01.020Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b"} 2025-12-22T10:59:01.044Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "user": "root"} 2025-12-22T10:59:01.044Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "user": "operator"} 2025-12-22T10:59:01.056Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "user": "operator"} 2025-12-22T10:59:01.083Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "secret": "some-name-mysql-init", "user": "operator"} 2025-12-22T10:59:01.112Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "user": "operator"} 2025-12-22T10:59:01.112Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "user": "monitor"} 2025-12-22T10:59:01.122Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "user": "monitor"} 2025-12-22T10:59:01.143Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": 
"24d89c31-3468-4ac8-a9eb-d8c45269681b", "secret": "some-name-mysql-init", "user": "monitor"} 2025-12-22T10:59:01.162Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "user": "monitor"} 2025-12-22T10:59:01.185Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "user": "monitor"} 2025-12-22T10:59:01.185Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "user": "xtrabackup"} 2025-12-22T10:59:01.197Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "user": "xtrabackup"} 2025-12-22T10:59:01.220Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-12-22T10:59:01.257Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "user": "xtrabackup"} 2025-12-22T10:59:01.257Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "user": "replication"} 2025-12-22T10:59:01.268Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "user": "replication"} 2025-12-22T10:59:01.286Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "secret": "some-name-mysql-init", "user": "replication"} 2025-12-22T10:59:01.309Z INFO Internal secrets updated {"controller": "pxc-controller", 
"controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "user": "replication"} 2025-12-22T10:59:01.309Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "user": "proxyadmin"} 2025-12-22T10:59:01.329Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "user": "proxyadmin"} 2025-12-22T10:59:01.350Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "user": "proxyadmin"} 2025-12-22T10:59:01.350Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "last-applied-secret": "0f7b5156c360408f5acb1ef5cbc55ffef4962ad03c3d2d51651607e1d044ca1e"} 2025-12-22T10:59:01.350Z INFO PXC pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "last-applied-secret": "0f7b5156c360408f5acb1ef5cbc55ffef4962ad03c3d2d51651607e1d044ca1e"} 2025-12-22T10:59:01.353Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-22T10:59:01.398Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-22T10:59:03.103Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "24d89c31-3468-4ac8-a9eb-d8c45269681b", "error": "exec syncusers: failed to 
execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-22T11:00:36.519Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "113d69b4-27f0-47b4-be01-b393e9fa589a", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-31794 on 34.118.224.10:53: no such host"} 2025-12-22T11:00:52.453Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "0c29e5d2-3083-49d0-b726-43a9d2b21036", "primary name": "some-name-pxc-0.some-name-pxc.users-31794.svc.cluster.local"} 2025-12-22T11:01:02.725Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "d4e2250e-9f2e-4431-aacb-0d29a99c122c", "primary name": "some-name-pxc-0.some-name-pxc.users-31794.svc.cluster.local"} 2025-12-22T11:01:25.897Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "2d30fd25-2607-4a35-b0be-5dccb71ce328"} 2025-12-22T11:01:30.263Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "15523879-c2ca-4c99-be1f-2f1167e13d28", "user": "operator"} 2025-12-22T11:01:30.276Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "15523879-c2ca-4c99-be1f-2f1167e13d28", "user": "operator"} 2025-12-22T11:01:30.298Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "15523879-c2ca-4c99-be1f-2f1167e13d28", "secret": "some-name-mysql-init", "user": 
"operator"} 2025-12-22T11:01:30.324Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "15523879-c2ca-4c99-be1f-2f1167e13d28", "user": "operator"} 2025-12-22T11:01:30.324Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "15523879-c2ca-4c99-be1f-2f1167e13d28", "last-applied-secret": "f237cddd2e9631e6195f4db3a501711c93fa24e9ebb417951f5e533acce52ca5"} 2025-12-22T11:01:30.328Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "15523879-c2ca-4c99-be1f-2f1167e13d28", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-22T11:01:31.124Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "7ffea48b-2095-4539-9376-e0fa091ae32d", "error": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-1\" not found / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-31794.svc.cluster.local:3306) to ProxySQL\n / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR (line:587) : Could not connect to the server. \n-- Please check the server connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: pods \"some-name-proxysql-1\" not found / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-31794.svc.cluster.local:3306) to ProxySQL\n / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-1.some-name-proxysql-unready.users-31794.svc.' (using password: YES)\nERROR (line:587) : Could not connect to the server. 
\n-- Please check the server connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-22T11:02:00.065Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "55eccbde-02ac-4454-a6d4-ef13679bdaff"} 2025-12-22T11:02:04.582Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "50854e57-2c3f-4d8f-96b2-eea9f0287dd4"} 2025-12-22T11:02:09.388Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "f9dede37-a271-44fa-94ed-42c8c945637e"} 2025-12-22T11:02:14.839Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "ef232e61-eb98-413a-bd49-6746c126d1c1"} 2025-12-22T11:02:20.331Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "8025b709-21ac-4d28-96db-8885d1fe747d"} 2025-12-22T11:02:25.536Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "9ad366e4-71fb-4330-8e07-c7b730697499"} 2025-12-22T11:02:31.304Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "d2ba5e76-96a3-49d2-9314-bbf907a427ab"} 2025-12-22T11:02:35.751Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "f260bd80-4fd1-46a9-bfc1-a65caa08ecbb"} 2025-12-22T11:02:41.060Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": 
"PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "2ef1b621-7649-4556-850b-79f0a84647ef"} 2025-12-22T11:02:46.459Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "e65011b4-8beb-4f08-8623-10f5c2c1c740"} 2025-12-22T11:02:51.284Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "b656516b-bddc-468a-abf6-98d66aaa8121"} 2025-12-22T11:02:56.734Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "5ed6923d-0226-4618-8fe7-3985c58506ac"} 2025-12-22T11:03:02.264Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "a3c843bb-4b20-4460-851e-b26b70614aa7"} 2025-12-22T11:03:07.375Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "dc8eb671-35d9-4abd-bf3a-d703a54892e8"} 2025-12-22T11:03:12.654Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "e200e868-e10e-4e7c-843f-d18b32c6977c"} 2025-12-22T11:03:18.161Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "84f6f425-28e9-477f-943f-5b260c3533ba"} 2025-12-22T11:03:23.037Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "ebb35db9-8a65-497a-b406-46d9f0c5e167"} 2025-12-22T11:03:28.358Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "70614406-97cb-4871-80c7-867ae1423cf9"} 2025-12-22T11:03:33.872Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", 
"PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "b31714ee-6343-4a33-881b-ecfae7bdb77d"} 2025-12-22T11:03:39.276Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "32585b86-222d-4e55-9de7-803a637b6b76"} 2025-12-22T11:03:44.450Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "1f7a2d5b-8712-45ee-835e-4f94adca1010"} 2025-12-22T11:03:49.856Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "91d0af9f-fc01-43a5-8585-499329600f29"} 2025-12-22T11:03:51.513Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "026a9a50-d588-4158-adae-fae98ccd397f", "user": "root"} 2025-12-22T11:03:51.529Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "026a9a50-d588-4158-adae-fae98ccd397f", "user": "root"} 2025-12-22T11:03:51.554Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "026a9a50-d588-4158-adae-fae98ccd397f", "secret": "some-name-mysql-init", "user": "root"} 2025-12-22T11:03:54.045Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "026a9a50-d588-4158-adae-fae98ccd397f"} 2025-12-22T11:03:54.069Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "026a9a50-d588-4158-adae-fae98ccd397f", "user": "root"} 2025-12-22T11:03:54.069Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "026a9a50-d588-4158-adae-fae98ccd397f", "user": "monitor"} 2025-12-22T11:03:54.080Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", 
"controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "026a9a50-d588-4158-adae-fae98ccd397f", "user": "monitor"} 2025-12-22T11:03:54.121Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "026a9a50-d588-4158-adae-fae98ccd397f", "secret": "some-name-mysql-init", "user": "monitor"} 2025-12-22T11:03:54.139Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "026a9a50-d588-4158-adae-fae98ccd397f", "user": "monitor"} 2025-12-22T11:03:54.164Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "026a9a50-d588-4158-adae-fae98ccd397f", "user": "monitor"} 2025-12-22T11:03:54.164Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "026a9a50-d588-4158-adae-fae98ccd397f", "user": "xtrabackup"} 2025-12-22T11:03:54.175Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "026a9a50-d588-4158-adae-fae98ccd397f", "user": "xtrabackup"} 2025-12-22T11:03:54.193Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "026a9a50-d588-4158-adae-fae98ccd397f", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-12-22T11:03:54.219Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "026a9a50-d588-4158-adae-fae98ccd397f", "user": "xtrabackup"} 2025-12-22T11:03:54.219Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "026a9a50-d588-4158-adae-fae98ccd397f", "user": "proxyadmin"} 2025-12-22T11:03:54.237Z INFO Proxy user updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": 
"026a9a50-d588-4158-adae-fae98ccd397f", "user": "proxyadmin"} 2025-12-22T11:03:54.269Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "026a9a50-d588-4158-adae-fae98ccd397f", "user": "proxyadmin"} 2025-12-22T11:03:54.269Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "026a9a50-d588-4158-adae-fae98ccd397f", "last-applied-secret": "49133365c5f7ccbfb86edb4285bd8306ef9f856966f57999626a6ad34decd2ec"} 2025-12-22T11:03:54.269Z INFO PXC pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "026a9a50-d588-4158-adae-fae98ccd397f", "last-applied-secret": "49133365c5f7ccbfb86edb4285bd8306ef9f856966f57999626a6ad34decd2ec"} 2025-12-22T11:03:54.274Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "026a9a50-d588-4158-adae-fae98ccd397f", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-22T11:03:54.329Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "026a9a50-d588-4158-adae-fae98ccd397f", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-22T11:03:56.183Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "026a9a50-d588-4158-adae-fae98ccd397f", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-22T11:04:56.027Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "04300c2e-7840-45eb-a2e8-3d0f3ad9b4b6", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-22T11:04:56.070Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "04300c2e-7840-45eb-a2e8-3d0f3ad9b4b6", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-12-22T11:04:56.119Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "04300c2e-7840-45eb-a2e8-3d0f3ad9b4b6", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-12-22T11:04:56.182Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "04300c2e-7840-45eb-a2e8-3d0f3ad9b4b6", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-22T11:04:56.253Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "04300c2e-7840-45eb-a2e8-3d0f3ad9b4b6", "object": "some-name-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-22T11:04:59.738Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "04300c2e-7840-45eb-a2e8-3d0f3ad9b4b6", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.237.49:3306: connect: connection refused"} 2025-12-22T11:04:59.870Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", 
"reconcileID": "c2ab0587-4988-4383-80a9-413065862628", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-12-22T11:05:02.929Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "c2ab0587-4988-4383-80a9-413065862628", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.237.49:3306: connect: connection refused"} 2025-12-22T11:07:56.722Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "2be2f308-74b3-4c32-b73b-f19186c62179", "user": "root"} 2025-12-22T11:07:56.737Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "2be2f308-74b3-4c32-b73b-f19186c62179", "user": "root"} 2025-12-22T11:07:56.758Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "2be2f308-74b3-4c32-b73b-f19186c62179", "secret": "some-name-mysql-init", "user": "root"} 2025-12-22T11:07:56.775Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "2be2f308-74b3-4c32-b73b-f19186c62179", "user": "root"} 2025-12-22T11:07:56.775Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "2be2f308-74b3-4c32-b73b-f19186c62179", "user": "operator"} 2025-12-22T11:07:56.785Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "2be2f308-74b3-4c32-b73b-f19186c62179", "user": "operator"} 2025-12-22T11:07:56.806Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "2be2f308-74b3-4c32-b73b-f19186c62179", "secret": "some-name-mysql-init", "user": "operator"} 2025-12-22T11:07:56.822Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": 
"2be2f308-74b3-4c32-b73b-f19186c62179", "user": "operator"} 2025-12-22T11:07:56.822Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "2be2f308-74b3-4c32-b73b-f19186c62179", "user": "monitor"} 2025-12-22T11:07:56.830Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "2be2f308-74b3-4c32-b73b-f19186c62179", "user": "monitor"} 2025-12-22T11:07:56.848Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "2be2f308-74b3-4c32-b73b-f19186c62179", "secret": "some-name-mysql-init", "user": "monitor"} 2025-12-22T11:07:56.868Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "2be2f308-74b3-4c32-b73b-f19186c62179", "user": "monitor"} 2025-12-22T11:07:56.868Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "2be2f308-74b3-4c32-b73b-f19186c62179", "user": "xtrabackup"} 2025-12-22T11:07:56.877Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "2be2f308-74b3-4c32-b73b-f19186c62179", "user": "xtrabackup"} 2025-12-22T11:07:56.898Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "2be2f308-74b3-4c32-b73b-f19186c62179", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-12-22T11:07:56.915Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "2be2f308-74b3-4c32-b73b-f19186c62179", "user": "xtrabackup"} 2025-12-22T11:07:56.915Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "2be2f308-74b3-4c32-b73b-f19186c62179", "user": "replication"} 2025-12-22T11:07:56.925Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", 
"controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "2be2f308-74b3-4c32-b73b-f19186c62179", "user": "replication"} 2025-12-22T11:07:56.939Z INFO MySQL init secret updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "2be2f308-74b3-4c32-b73b-f19186c62179", "secret": "some-name-mysql-init", "user": "replication"} 2025-12-22T11:07:56.958Z INFO HAProxy pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "2be2f308-74b3-4c32-b73b-f19186c62179", "last-applied-secret": "830f92a4fb43c8abae77c299f6240f5ccc5452ba59ead38f4165beb7d6592819"} 2025-12-22T11:07:56.958Z INFO Internal secrets updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "2be2f308-74b3-4c32-b73b-f19186c62179", "user": "replication"} 2025-12-22T11:07:56.958Z INFO PXC pods will be restarted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "2be2f308-74b3-4c32-b73b-f19186c62179", "last-applied-secret": "830f92a4fb43c8abae77c299f6240f5ccc5452ba59ead38f4165beb7d6592819"} 2025-12-22T11:07:56.961Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "2be2f308-74b3-4c32-b73b-f19186c62179", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-22T11:07:56.998Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "2be2f308-74b3-4c32-b73b-f19186c62179", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-22T11:10:17.238Z INFO Password changed, updating user {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "3ec5c5e0-4c1d-462f-824a-35540d29d047", "user": "monitor"} 2025-12-22T11:10:17.249Z INFO User password updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"users-31794"}, "namespace": "users-31794", "name": "some-name", "reconcileID": "3ec5c5e0-4c1d-462f-824a-35540d29d047", "user": "monitor"} 
[The remainder of this chunk is controller-runtime DEBUG diff output (a cmp.Diff of the desired versus in-cluster some-name-pxc, some-name-haproxy, and some-name-proxysql StatefulSets) whose lines were reordered during log extraction; the original diff cannot be reconstructed. What is recoverable: the diffs toggle the pod-template annotations "last-applied-secret" (hashes 49133365…, 56d0a69e…, 830f92a4…, cc8462fc…, dca36778…, e327b0e0…, f237cddd…) and "percona.com/last-config-hash" (large base64-encoded template snapshots), show the "logs" and "logrotate" log-collector containers (image perconalab/fluentbit:main-logcollector) being removed, and clear server-populated fields on the operator's side of the comparison (AvailableReplicas, CurrentReplicas, CurrentRevision, Generation/ObservedGeneration, CollisionCount, CreationTimestamp, ManagedFields, DeprecatedServiceAccount, DNSPolicy). Two unrelated log lines were swept into the same block: "[mysql] 2025/12/22 11:09:51 packets.go:58 unexpected EOF" and a goroutine trace through github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 (/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:857). The final base64 "percona.com/last-config-hash" value is truncated in the source.]
Fsd2F5cyJ9LHsibmFtZSI6InB4YyIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOm1haW4tcHhjNS43IiwiY29tbWFuZCI6WyIvdmFyL2xpYi9teXNxbC9weGMtZW50cnlwb2ludC5zaCJdLCJhcmdzIjpbIm15c3FsZCJdLCJwb3J0cyI6W3sibmFtZSI6Im15c3FsIiwiY29udGFpbmVyUG9ydCI6MzMwNn0seyJuYW1lIjoic3N0IiwiY29udGFpbmVyUG9ydCI6NDQ0NH0seyJuYW1lIjoid3JpdGUtc2V0IiwiY29udGFpbmVyUG9ydCI6NDU2N30seyJuYW1lIjoiaXN0IiwiY29udGFpbmVyUG9ydCI6NDU2OH0seyJuYW1lIjoibXlzcWwtYWRtaW4iLCJjb250YWluZXJQb3J0IjozMzA2Mn0seyJuYW1lIjoibXlzcWx4IiwiY29udGFpbmVyUG9ydCI6MzMwNjB9XSwiZW52RnJv"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNDkxMzMzNjVjNWY3Y2NiZmI4NmVkYjQyODViZDgzMDZlZjlmODU2OTY2ZjU3OTk5NjI2YTZhZDM0ZGVjZDJlYyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMzI1LWQ0MDFlMGE5IiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXB4YzUuNyIsImNvbW1hbmQiOlsiL3Zhci9saWIvbXlzcWwvcHhjLWVudHJ5cG9pbnQuc2giXSwiYXJncyI6WyJteXNxbGQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InNzdCIsImNvbnRhaW5lclBvcnQiOjQ0NDR9LHsibmFtZSI6IndyaXRlLXNldCIsImNvbnRhaW5lclBvcnQiOjQ1Njd9LHsibmFtZSI6ImlzdCIsImNvbnRhaW5lclBvcnQiOjQ1Njh9LHsibmFtZSI6Im15c3FsLWFkbWluIiwiY29udGFpbmVyUG9ydCI6MzMwNjJ9LHsibmFtZSI6Im15c3FseCIsImNv
bnRhaW5lclBvcnQiOjMzMDYwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXB4YyIsIm9wdGlvbmFsIjp0cnVlfX1dLCJlbnYiOlt7Im5hbWUiOiJQWENfU0VSVklDRSIsInZhbHVlIjoic29tZS1uYW1lLXB4Yy11bnJlYWR5In0seyJuYW1lIjoiTU9OSVRPUl9IT1NUIiwidmFsdWUiOiIlIn0seyJuYW1lIjoiTVlTUUxfUk9PVF9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6InJvb3QifX19LHsibmFtZSI6IlhUUkFCQUNLVVBfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJ4dHJhYmFja3VwIn19fSx7Im5hbWUiOiJNT05JVE9SX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoibW9uaXRvciJ9fX0seyJuYW1lIjoiQ0xVU1RFUl9IQVNIIiwidmFsdWUiOiIyMTA3NDA1In0seyJuYW1lIjoiT1BFUkFUT1JfQURNSU5fUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiTElWRU5FU1NfQ0hFQ0tfVElNRU9VVCIsInZhbHVlIjoiNSJ9LHsibmFtZSI6IlJFQURJTkVTU19DSEVDS19USU1FT1VUIiwidmFsdWUiOiIxNSJ9LHsibmFtZSI6IkRFRkFVTFRfQVVUSEVOVElDQVRJT05fUExVR0lOIiwidmFsdWUiOiJjYWNoaW5nX3NoYTJfcGFzc3dvcmQifSx7Im5hbWUiOiJNWVNR"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiODMwZjkyYTRmYjQzYzhhYmFlNzdjMjk5ZjYyNDBmNWNjYzU0NTJiYTU5ZWFkMzhmNDE2NWJlYjdkNjU5MjgxOSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZTMyN2IwZTBmOTBiN2VlY2Y4NjcyYjZhMzE5ZWY5YjJjMzFkMTQ4YjRiZWIzY2JiNWIwZDdmMTE0NmYzNWUyMiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZTMyN2IwZTBmOTBiN2VlY2Y4NjcyYjZhMzE5ZWY5YjJjMzFkMTQ4YjRiZWIzY2JiNWIwZDdmMTE0NmYzNWUyMiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNv"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNTZkMGE2OWU3Zjc3MzhiYjMxODhhNzczYjBmNjA4Y2E5MDNhNzA4ZjdlY2Y4NGYwMDA2MjJiZmRmMmE5NDk2MiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMw"..., "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc", + PeriodSeconds: 0, - PeriodSeconds: 10, + PersistentVolumeClaimRetentionPolicy: nil, - PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", + Phase: "", - Phase: "Pending", + PodManagementPolicy: "", - PodManagementPolicy: "OrderedReady", Ports: nil, Ports: []v1.ContainerPort{ PreemptionPolicy: nil, ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}}, + Protocol: "", - Protocol: "TCP", Quobyte: nil, ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...}, + ReadyReplicas: 0, - ReadyReplicas: 2, - ReadyReplicas: 3, + Replicas: 0, Replicas: &2, - Replicas: 2, - Replicas: &2, + Replicas: &2, Replicas: &3, - Replicas: 3, - Replicas: &3, + Replicas: &3, ResizePolicy: nil, ResourceFieldRef: nil, Resources: {}, Resources: {Limits: {s"cpu": {i: {...}, s: "1", Format: "DecimalSI"}, s"memory": {i: {...}, s: "2G", Format: "DecimalSI"}}, Requests: {s"cpu": {i: {...}, s: "100m", Format: "DecimalSI"}, s"memory": {i: {...}, Format: "DecimalSI"}}}, + ResourceVersion: "", - ResourceVersion: 
"1766400448350223020", - ResourceVersion: "1766400626997375003", - ResourceVersion: "1766400790499519020", - ResourceVersion: "1766400832586287020", - ResourceVersion: "1766400863632927020", - ResourceVersion: "1766401012334863003", - ResourceVersion: "1766401062941023020", - ResourceVersion: "1766401113037775020", - ResourceVersion: "1766401163137407020", - ResourceVersion: "1766401280678063003", - ResourceVersion: "1766401309393535020", - ResourceVersion: "1766401492907935003", - ResourceVersion: "1766401573517087023", - ResourceVersion: "1766401673024111003", - ResourceVersion: "1766401755567007023", + RestartPolicy: "", - RestartPolicy: "Always", - RevisionHistoryLimit: &10, + RevisionHistoryLimit: nil, + SchedulerName: "", + SchedulerName: "", - SchedulerName: "default-scheduler", - SchedulerName: "default-scheduler", SecretName: "internal-some-name", SecretName: "some-name-env-vars-haproxy", SecretName: "some-name-mysql-init", SecretName: "some-name-ssl", SecretName: "some-name-ssl-internal", SecretName: "some-name-vault", Secret: &v1.SecretVolumeSource{ SecurityContext: nil, + SecurityContext: nil, - SecurityContext: s"&PodSecurityContext{SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,SupplementalGroups:[],FSGroup:nil,RunAsGroup:nil,Sysctls:[]Sysctl{},WindowsOptions:nil,FSGroupChangePolicy:nil,SeccompProfile:nil,AppArmorProfile:nil,SupplementalGroupsPolicy:nil,SELinux"..., Selector: &{MatchLabels: {"app.kubernetes.io/component": "haproxy", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, SelfLink: "", ServiceAccountName: "default", ServiceName: "some-name-haproxy", ServiceName: "some-name-proxysql-unready", ServiceName: "some-name-pxc", SetHostnameAsFQDN: nil, ShareProcessNamespace: nil, Spec: v1.PersistentVolumeClaimSpec{ Spec: v1.PodSpec{ Spec: v1.StatefulSetSpec{ StartupProbe: nil, Status: v1.PersistentVolumeClaimStatus{ Status: v1.StatefulSetStatus{ StorageClassName: nil, Subdomain: "", Subdomain: "", - Subresource: "status", SuccessThreshold: 1, Template: v1.PodTemplateSpec{ TerminationGracePeriodSeconds: &30, TerminationGracePeriodSeconds: &600, TerminationGracePeriodSeconds: nil, + TerminationMessagePath: "", - TerminationMessagePath: "/dev/termination-log", + TerminationMessagePolicy: "", - TerminationMessagePolicy: "File", TimeoutSeconds: 5, - Time: s"2025-12-22 10:46:48 +0000 UTC", - Time: s"2025-12-22 10:47:28 +0000 UTC", - Time: s"2025-12-22 10:50:26 +0000 UTC", - Time: s"2025-12-22 10:52:50 +0000 UTC", - Time: s"2025-12-22 10:53:10 +0000 UTC", - Time: s"2025-12-22 10:53:15 +0000 UTC", - Time: s"2025-12-22 10:53:52 +0000 UTC", - Time: s"2025-12-22 10:54:22 +0000 UTC", - Time: s"2025-12-22 10:54:23 +0000 UTC", - Time: s"2025-12-22 10:54:29 +0000 UTC", - Time: s"2025-12-22 10:56:52 +0000 UTC", - Time: s"2025-12-22 10:57:15 +0000 UTC", - Time: s"2025-12-22 10:57:42 +0000 UTC", - Time: s"2025-12-22 10:58:15 +0000 UTC", - Time: s"2025-12-22 10:58:33 +0000 UTC", - Time: s"2025-12-22 10:59:01 
+0000 UTC", - Time: s"2025-12-22 10:59:23 +0000 UTC", - Time: s"2025-12-22 11:01:20 +0000 UTC", - Time: s"2025-12-22 11:01:30 +0000 UTC", - Time: s"2025-12-22 11:01:49 +0000 UTC", - Time: s"2025-12-22 11:03:54 +0000 UTC", - Time: s"2025-12-22 11:04:52 +0000 UTC", - Time: s"2025-12-22 11:04:56 +0000 UTC", - Time: s"2025-12-22 11:06:13 +0000 UTC", - Time: s"2025-12-22 11:07:53 +0000 UTC", - Time: s"2025-12-22 11:07:57 +0000 UTC", - Time: s"2025-12-22 11:09:15 +0000 UTC", Tolerations: {{Key: "node.alpha.kubernetes.io/unreachable", Operator: "Exists", Effect: "NoExecute", TolerationSeconds: &6000}}, Tolerations: nil, - TopologySpreadConstraints: nil, + TopologySpreadConstraints: []v1.TopologySpreadConstraint{}, TypeMeta: {}, TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"}, + UID: "", - UID: "2ed16c11-0d42-4bcb-8987-c8651d08af6c", - UID: "9563cccd-792b-4a74-9303-4c2152f9ebaa", - UID: "e7769e6e-2db7-4e65-9958-3344083534fa", + UpdatedReplicas: 0, - UpdatedReplicas: 2, - UpdatedReplicas: 3, + UpdateRevision: "", - UpdateRevision: "some-name-haproxy-55d494f9b5", - UpdateRevision: "some-name-haproxy-77d9cd4bb9", - UpdateRevision: "some-name-proxysql-59694db8b9", - UpdateRevision: "some-name-proxysql-5fd678f649", - UpdateRevision: "some-name-proxysql-6dcb7c9b84", - UpdateRevision: "some-name-proxysql-6fd5ccddf", - UpdateRevision: "some-name-proxysql-76996b7b85", - UpdateRevision: "some-name-proxysql-7df76b64f9", - UpdateRevision: "some-name-pxc-58b64dc496", - UpdateRevision: "some-name-pxc-5f8f4fc876", - UpdateRevision: "some-name-pxc-667b9cdbf5", - UpdateRevision: "some-name-pxc-6b75866ddf", - UpdateRevision: "some-name-pxc-75f4f959dd", UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}}, &v1.StatefulSet{ Value: "", ValueFrom: &v1.EnvVarSource{ VolumeAttributesClassName: nil, VolumeClaimTemplates: nil, VolumeClaimTemplates: []v1.PersistentVolumeClaim{ VolumeDevices: nil, - VolumeMode: &"Filesystem", + VolumeMode: nil, VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...}, - VolumeMounts: []v1.VolumeMount{{Name: "datadir", MountPath: "/var/lib/mysql"}}, VolumeName: "", VolumeSource: v1.VolumeSource{ Volumes: []v1.Volume{ VsphereVolume: nil, WorkingDir: "", + grep -v NAMESPACE + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-31794 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.xCiYjcc7cS ++ mktemp + local LAST_ERR=/tmp/tmp.iCkNLvz8XR + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xCiYjcc7cS perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-31794 namespace + cat /tmp/tmp.iCkNLvz8XR + rm /tmp/tmp.xCiYjcc7cS /tmp/tmp.iCkNLvz8XR + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.akMoVeW7Dw ++ mktemp + local LAST_ERR=/tmp/tmp.evCno7DcAb + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.akMoVeW7Dw No resources found + cat 
+ grep -v NAMESPACE
+ kubectl get pxc --all-namespaces -o wide
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl patch pxc -n users-31794 some-name --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.xCiYjcc7cS
++ mktemp
+ local LAST_ERR=/tmp/tmp.iCkNLvz8XR
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.xCiYjcc7cS
perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-31794 namespace
+ cat /tmp/tmp.iCkNLvz8XR
+ rm /tmp/tmp.xCiYjcc7cS /tmp/tmp.iCkNLvz8XR
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.akMoVeW7Dw
++ mktemp
+ local LAST_ERR=/tmp/tmp.evCno7DcAb
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.akMoVeW7Dw
No resources found
+ cat /tmp/tmp.evCno7DcAb
+ rm /tmp/tmp.akMoVeW7Dw /tmp/tmp.evCno7DcAb
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.2Vjr78AuqO
++ mktemp
+ local LAST_ERR=/tmp/tmp.Wo1O9WSjG9
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.2Vjr78AuqO
No resources found
+ cat /tmp/tmp.Wo1O9WSjG9
+ rm /tmp/tmp.2Vjr78AuqO /tmp/tmp.Wo1O9WSjG9
+ return 0
+ kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
++ mktemp
+ local LAST_OUT=/tmp/tmp.j6ZYTW2Wfs
++ mktemp
+ local LAST_ERR=/tmp/tmp.wjpXgDEskw
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.j6ZYTW2Wfs
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted
+ cat /tmp/tmp.wjpXgDEskw
+ rm /tmp/tmp.j6ZYTW2Wfs /tmp/tmp.wjpXgDEskw
+ return 0
+ kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml
+ :
+ '[' '!' -z '' ']'
+ '[' -n pxc-operator ']'
+ kubectl_bin delete --grace-period=0 --force=true namespace users-31794
+ rm -rf /tmp/tmp.dqV03Eaoyh
+ kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator
++ mktemp
++ mktemp
+ desc 'test passed'
+ set +o xtrace
+ local LAST_OUT=/tmp/tmp.xrfPV0R8Zf
-----------------------------------------------------------------------------------
test passed
-----------------------------------------------------------------------------------
+ local LAST_OUT=/tmp/tmp.Vg39uoOe3P
++ mktemp
++ mktemp
+ local LAST_ERR=/tmp/tmp.rI2wbwSRwb
+ local exit_status=0
+ local LAST_ERR=/tmp/tmp.Hk1Cnq5KcM
+ local exit_status=0
++ seq 0 2
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace pxc-operator
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace users-31794
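The teardown above first strips finalizers from every pxc custom resource so that the subsequent kubectl delete cannot hang waiting on the operator. The same pattern as a standalone sketch; the helper name is hypothetical, but the pipeline mirrors the xargs invocation in this trace:

# Strip finalizers from all pxc resources in all namespaces, then delete them.
cleanup_pxc() {
  kubectl get pxc --all-namespaces -o wide \
    | grep -v NAMESPACE \
    | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
  kubectl delete pxc --all --all-namespaces
}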
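Every kubectl_bin call in this log expands to the same scaffold: two mktemp files, up to three attempts (seq 0 2), and a break as soon as kubectl exits 0. A reconstruction of that wrapper inferred from the trace; the backoff between failed attempts is an assumption, since this run only shows first-try successes:

kubectl_bin() {
  # Buffer stdout/stderr so output is replayed only after the retry loop ends.
  local LAST_OUT
  local LAST_ERR
  local exit_status=0
  LAST_OUT=$(mktemp)
  LAST_ERR=$(mktemp)
  for i in $(seq 0 2); do
    set +e
    kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
    exit_status=$?
    set -e
    if [ $exit_status != 0 ]; then
      sleep 1   # assumed backoff; not visible in this trace
    else
      break     # matches the '[' 0 '!=' 0 ']' / break lines above
    fi
  done
  cat "$LAST_OUT"
  cat "$LAST_ERR" >&2
  rm "$LAST_OUT" "$LAST_ERR"
  return $exit_status
}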