Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/logs/users-5-7.log
grep: warning: stray \ before -
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
+ create_infra users-28439
+ local ns=users-28439
+ '[' -n pxc-operator ']'
+ kubectl get pxc --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl patch pxc -n users-31576 some-name --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.Ic3SJCBUPk
++ mktemp
+ local LAST_ERR=/tmp/tmp.mFefgmM4cB
+ local exit_status=0
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.Ic3SJCBUPk
perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-31576 namespace
+ cat /tmp/tmp.mFefgmM4cB
+ rm /tmp/tmp.Ic3SJCBUPk /tmp/tmp.mFefgmM4cB
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.Eoku0GZAV1
++ mktemp
+ local LAST_ERR=/tmp/tmp.9BlPrVEHdq
+ local exit_status=0
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.Eoku0GZAV1
No resources found
+ cat /tmp/tmp.9BlPrVEHdq
+ rm /tmp/tmp.Eoku0GZAV1 /tmp/tmp.9BlPrVEHdq
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.P8tmhT6hJr
++ mktemp
+ local LAST_ERR=/tmp/tmp.7OxrDv9jPf
+ local exit_status=0
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.P8tmhT6hJr
No resources found
+ cat /tmp/tmp.7OxrDv9jPf
+ rm /tmp/tmp.P8tmhT6hJr /tmp/tmp.7OxrDv9jPf
+ return 0
+ create_namespace pxc-operator
+ local namespace=pxc-operator
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ tail -n1
++ awk '-F ' '{print $2}'
++ sed s/NAMESPACE//
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ kubectl get MutatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ awk '{print $1}'
++ grep chaos-mesh
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep validate-auth
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl api-resources
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get crd
++ grep chaos-mesh.org
++ awk '{print $1}'
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrole
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
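The stanza above shows the retry wrapper this suite puts around every kubectl call: two mktemp capture files, a three-attempt loop driven by seq 0 2, set +e around the single kubectl invocation, and a final cat/rm of the captures. A minimal reconstruction inferred from the xtrace output; the real helper in e2e-tests/functions may differ in details, and the trace shows a flat "sleep 0" between attempts, i.e. no real back-off:

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do                  # up to three attempts
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ "$exit_status" != 0 ]; then
            sleep 0                          # the trace shows no delay between retries
            continue
        fi
        break
    done
    cat "$LAST_OUT"                          # replay captured stdout into the log
    cat "$LAST_ERR" >&2                      # and captured stderr
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}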
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ kubectl_bin get ns
+ egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
+ awk '{print$1}'
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces pxc-operator'
++ mktemp
+ xargs kubectl delete ns
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.lXpLlG2KEU
egrep: warning: egrep is obsolescent; using grep -E
++ mktemp
+ local LAST_OUT=/tmp/tmp.3gTriK0K7S
++ mktemp
+ local LAST_ERR=/tmp/tmp.ybIRIrddLA
+ local exit_status=0
++ seq 0 2
+ local LAST_ERR=/tmp/tmp.Y7pIhs7Rm8
+ local exit_status=0
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl get ns
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete namespace pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.lXpLlG2KEU
+ cat /tmp/tmp.ybIRIrddLA
+ rm /tmp/tmp.lXpLlG2KEU /tmp/tmp.ybIRIrddLA
+ return 0
namespace "users-31576" deleted
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.3gTriK0K7S
namespace "pxc-operator" deleted
+ cat /tmp/tmp.Y7pIhs7Rm8
+ rm /tmp/tmp.3gTriK0K7S /tmp/tmp.Y7pIhs7Rm8
+ return 0
+ wait_for_delete namespace/pxc-operator
+ local res=namespace/pxc-operator
+ echo -n 'waiting for namespace/pxc-operator to be deleted'
waiting for namespace/pxc-operator to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "pxc-operator" not found
+ desc 'create namespace pxc-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.5rC7Q8ohen
++ mktemp
+ local LAST_ERR=/tmp/tmp.BPsnGRHsQX
+ local exit_status=0
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl create namespace pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.5rC7Q8ohen
namespace/pxc-operator created
+ cat /tmp/tmp.BPsnGRHsQX
+ rm /tmp/tmp.5rC7Q8ohen /tmp/tmp.BPsnGRHsQX
+ return 0
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.AQUjwiDUXj
+++ mktemp
++ local LAST_ERR=/tmp/tmp.uP5N8DQpFJ
++ local exit_status=0
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.AQUjwiDUXj
++ cat /tmp/tmp.uP5N8DQpFJ
++ rm /tmp/tmp.AQUjwiDUXj /tmp/tmp.uP5N8DQpFJ
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2203-07b4356f-1-cluster7 --namespace=pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.YfwETStjSJ
++ mktemp
+ local LAST_ERR=/tmp/tmp.Mm4N2I7i4q
+ local exit_status=0
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2203-07b4356f-1-cluster7 --namespace=pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.YfwETStjSJ
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2203-07b4356f-1-cluster7" modified.
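The namespace sweep above is a single filter pipeline: list everything, drop system and protected namespaces with an inverted extended-regex match, and feed the survivors to xargs for deletion. Reconstructed from the trace; note the log's own "egrep is obsolescent" warning (grep -E is the current spelling), and that GNU xargs -r would avoid the "no name was specified" error this suite instead tolerates when the filtered list comes back empty:

kubectl get ns \
    | grep -Ev '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' \
    | awk '{print $1}' \
    | xargs -r kubectl delete ns    # -r: skip the delete entirely when no names survive the filter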
+ cat /tmp/tmp.Mm4N2I7i4q + rm /tmp/tmp.YfwETStjSJ /tmp/tmp.Mm4N2I7i4q + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.jN2amHHhqC ++ mktemp + local LAST_ERR=/tmp/tmp.eHvGhmeAbI + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jN2amHHhqC customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.eHvGhmeAbI + rm /tmp/tmp.jN2amHHhqC /tmp/tmp.eHvGhmeAbI + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.wuR6JzR5l0 ++ mktemp + local LAST_ERR=/tmp/tmp.qY1jKvyjaW + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.wuR6JzR5l0 clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.qY1jKvyjaW + rm /tmp/tmp.wuR6JzR5l0 /tmp/tmp.qY1jKvyjaW + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/deploy/cw-operator.yaml + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2203-07b4356f^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.C8Zmjs5fFB ++ mktemp + local LAST_ERR=/tmp/tmp.2t30Hczeg6 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.C8Zmjs5fFB deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.2t30Hczeg6 + rm /tmp/tmp.C8Zmjs5fFB /tmp/tmp.2t30Hczeg6 + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.n6YtqfZ8yz ++ mktemp + local LAST_ERR=/tmp/tmp.VjYagTWs0h + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=condition=Ready pods -l 
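deploy_operator, traced above, never templates cw-operator.yaml on disk; it rewrites the stream in flight. sed pins the probe failureThreshold and the image tag, and two yq expressions flip container env values before kubectl apply consumes stdin. Condensed from the trace into one pipeline:

cat deploy/cw-operator.yaml \
    | sed -e 's^failureThreshold: .*^failureThreshold: 10^' \
    | sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2203-07b4356f^' \
    | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[]
          | select(.name == "percona-xtradb-cluster-operator").env[]
          | select(.name == "DISABLE_TELEMETRY").value) = "true"' - \
    | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[]
          | select(.name == "percona-xtradb-cluster-operator").env[]
          | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - \
    | kubectl apply -f -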
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.n6YtqfZ8yz pod/percona-xtradb-cluster-operator-7db859f455-2rm95 condition met + cat /tmp/tmp.VjYagTWs0h + rm /tmp/tmp.n6YtqfZ8yz /tmp/tmp.VjYagTWs0h + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.bvBw2v85Jp +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZUnifIOdS7 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bvBw2v85Jp ++ cat /tmp/tmp.ZUnifIOdS7 ++ rm /tmp/tmp.bvBw2v85Jp /tmp/tmp.ZUnifIOdS7 ++ return 0 + wait_pod percona-xtradb-cluster-operator-7db859f455-2rm95 480 pxc-operator + local pod=percona-xtradb-cluster-operator-7db859f455-2rm95 + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-7db859f455-2rm95 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-7db859f455-2rm95 condition met waiting for pod/percona-xtradb-cluster-operator-7db859f455-2rm95 to become Ready.Ok + sleep 3 + create_namespace users-28439 + local namespace=users-28439 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces 
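Every "error: resource(s) were provided, but no name was specified" inside the destroy_chaos_mesh blocks is expected noise: grep produced an empty name list because no chaos-mesh leftovers exist, so kubectl delete runs with no arguments and fails, and the "+ :" that follows is the no-op swallowing that failure. The repeated stanza reduces to roughly:

for kind in MutatingWebhookConfiguration ValidatingWebhookConfiguration clusterrolebinding clusterrole; do
    # empty when nothing matches; left unquoted below so multiple names word-split
    names=$(kubectl get "$kind" | grep chaos-mesh | awk '{print $1}')
    timeout 30 kubectl delete "$kind" $names || :   # ':' tolerates the empty-list failure
done
crds=$(kubectl get crd | grep chaos-mesh.org | awk '{print $1}')
timeout 30 kubectl delete crd $crds || :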
----------------------------------------------------------------------------------- + egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + kubectl_bin get ns + '[' -n '' ']' + awk '{print$1}' + desc 'cleaned up old namespaces users-28439' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-28439 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-28439 + xargs kubectl delete ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.Yd4WaC1MSs + local LAST_OUT=/tmp/tmp.NZfJbRDyT2 egrep: warning: egrep is obsolescent; using grep -E ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.39ThhiFcd1 + local exit_status=0 + local LAST_ERR=/tmp/tmp.Jv7l5Pkp4z + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get ns + for i in $(seq 0 2) + set +e + kubectl delete namespace users-28439 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete namespace users-28439 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Yd4WaC1MSs + cat /tmp/tmp.Jv7l5Pkp4z + rm /tmp/tmp.Yd4WaC1MSs /tmp/tmp.Jv7l5Pkp4z + return 0 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete namespace users-28439 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.NZfJbRDyT2 + cat /tmp/tmp.39ThhiFcd1 Error from server (NotFound): namespaces "users-28439" not found + rm /tmp/tmp.NZfJbRDyT2 /tmp/tmp.39ThhiFcd1 + return 1 + : + wait_for_delete namespace/users-28439 + local res=namespace/users-28439 + echo -n 'waiting for namespace/users-28439 to be deleted' waiting for namespace/users-28439 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "users-28439" not found + desc 'create namespace users-28439' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-28439 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-28439 ++ mktemp + local LAST_OUT=/tmp/tmp.zuSkGVPKDb ++ mktemp + local LAST_ERR=/tmp/tmp.BR2Vfp4zHi + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace users-28439 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zuSkGVPKDb namespace/users-28439 created + cat /tmp/tmp.BR2Vfp4zHi + rm /tmp/tmp.zuSkGVPKDb /tmp/tmp.BR2Vfp4zHi + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.tJBkAfoZT3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.SBhxzH6IPe ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tJBkAfoZT3 ++ cat /tmp/tmp.SBhxzH6IPe ++ rm /tmp/tmp.tJBkAfoZT3 /tmp/tmp.SBhxzH6IPe ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2203-07b4356f-1-cluster7 --namespace=users-28439 ++ mktemp + local LAST_OUT=/tmp/tmp.nZ11pIAuRx ++ mktemp + local LAST_ERR=/tmp/tmp.u2eEomr50l + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2203-07b4356f-1-cluster7 --namespace=users-28439 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat 
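wait_for_delete prints its banner and then hides the polling loop behind set +o xtrace, so only the final NotFound probe reaches the log. Only the echo and that last error line are actually in the trace; the loop below is an assumption about the hidden part, a simple poll until the API server reports the resource gone:

wait_for_delete() {
    local res=$1
    echo -n "waiting for $res to be deleted"
    set +o xtrace
    # assumed body: probe until kubectl reports NotFound
    until kubectl get "$res" 2>&1 | grep -q NotFound; do
        echo -n .
        sleep 1
    done
}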
/tmp/tmp.nZ11pIAuRx Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2203-07b4356f-1-cluster7" modified. + cat /tmp/tmp.u2eEomr50l + rm /tmp/tmp.nZ11pIAuRx /tmp/tmp.u2eEomr50l + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.3456PDeAbf ++ mktemp + local LAST_ERR=/tmp/tmp.Xs8I9S2dOn + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3456PDeAbf secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.Xs8I9S2dOn + rm /tmp/tmp.3456PDeAbf /tmp/tmp.Xs8I9S2dOn + return 0 + desc 'create PXC cluster with 1-password secret' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster with 1-password secret ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/conf/some-name.yml '' '' /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/conf/secrets_one_pass.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/conf/secrets_one_pass.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/conf/secrets_one_pass.yml ++ mktemp + local LAST_OUT=/tmp/tmp.qAsVDEqtxp ++ mktemp + local LAST_ERR=/tmp/tmp.c7XCak0FRN + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/conf/secrets_one_pass.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qAsVDEqtxp secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.c7XCak0FRN + rm /tmp/tmp.qAsVDEqtxp /tmp/tmp.c7XCak0FRN + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/conf/client.yml + /usr/sbin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/sbin/sed -e 
's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/sbin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/sbin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2203-07b4356f#' + /usr/sbin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + local LAST_OUT=/tmp/tmp.K6tZWGnzV7 + /usr/sbin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/sbin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' ++ mktemp + /usr/sbin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/sbin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/sbin/sed -e s~minio-service.#namespace~minio-service.users-28439~ + /usr/sbin/sed -e 's#apply:.*#apply: Never#' + local LAST_ERR=/tmp/tmp.g8IB0wiFaK + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.K6tZWGnzV7 deployment.apps/pxc-client created + cat /tmp/tmp.g8IB0wiFaK + rm /tmp/tmp.K6tZWGnzV7 /tmp/tmp.g8IB0wiFaK + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/conf/some-name.yml + /usr/sbin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/sbin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/sbin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/sbin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/sbin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/sbin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_OUT=/tmp/tmp.ANprVVGDC5 + /usr/sbin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' ++ mktemp + /usr/sbin/sed -e 's#apply:.*#apply: Never#' + local LAST_ERR=/tmp/tmp.6E3CFkn2JZ + local exit_status=0 + /usr/sbin/sed -e s~minio-service.#namespace~minio-service.users-28439~ + /usr/sbin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2203-07b4356f#' ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + /usr/sbin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ANprVVGDC5 perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.6E3CFkn2JZ + rm /tmp/tmp.ANprVVGDC5 /tmp/tmp.6E3CFkn2JZ + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy 
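cat_config, used above for both client.yml and the some-name.yml custom resource, is the suite's substitute for templating: a stack of sed edits pins every image field to the tags under test (the main-pxc5.7 database image, the PR-built init image, and the pmm, proxysql, haproxy, backup, and logcollector tags) and splices the live namespace into the minio endpoint before kubectl apply reads the stream. Abbreviated from the trace, keeping a representative subset of the sed chain:

cat e2e-tests/conf/some-name.yml \
    | sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
    | sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' \
    | sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2203-07b4356f#' \
    | sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' \
    | sed -e 's~minio-service.#namespace~minio-service.users-28439~' \
    | kubectl apply -f -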
some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.romyOe0t2k ++++ mktemp +++ local LAST_ERR=/tmp/tmp.bD4YjwovkV +++ local exit_status=0 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.romyOe0t2k +++ cat /tmp/tmp.bD4YjwovkV +++ rm /tmp/tmp.romyOe0t2k /tmp/tmp.bD4YjwovkV +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.6cJpvO3j2X ++++ mktemp +++ local LAST_ERR=/tmp/tmp.XWXVcKTsDh +++ local exit_status=0 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.6cJpvO3j2X +++ cat /tmp/tmp.XWXVcKTsDh +++ rm /tmp/tmp.6cJpvO3j2X /tmp/tmp.XWXVcKTsDh +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-28439 ++ mktemp + local LAST_OUT=/tmp/tmp.zFkpuLqB1Z ++ mktemp + local LAST_ERR=/tmp/tmp.476raDv05s + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-28439 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in $(seq 0 2) + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-28439 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in $(seq 0 2) + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-28439 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.zFkpuLqB1Z + cat /tmp/tmp.476raDv05s error: no matching resources found + rm /tmp/tmp.zFkpuLqB1Z /tmp/tmp.476raDv05s + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in $(seq 0 $last_pod) + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster 
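The three failed kubectl wait attempts for the monitoring label just above are also expected: this test deploys no PMM, so "no matching resources found" is the normal outcome, and the "+ true" that follows "return 1" shows the caller deliberately discarding the failure, equivalent to:

kubectl wait --for=condition=Ready pod \
    -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator \
    --timeout=300s -n users-28439 || true   # tolerated: monitoring pods may simply not exist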
----------------------------------------------------------------------------------- ++ seq 0 2 + for i in $(seq 0 $last_pod) + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in $(seq 0 $last_pod) + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in $(seq 0 $last_pod) + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ base64 --decode ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iPBEMt9mcp +++ mktemp ++ local LAST_ERR=/tmp/tmp.O85PHgTFpT ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iPBEMt9mcp ++ cat /tmp/tmp.O85PHgTFpT ++ rm /tmp/tmp.iPBEMt9mcp /tmp/tmp.O85PHgTFpT ++ return 0 + local 'root_pass=H8B?nghhpeQGrirkNyN' + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kQB0OHrCng +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_ERR=/tmp/tmp.UyHXv9vyG2 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kQB0OHrCng ++ cat /tmp/tmp.UyHXv9vyG2 Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.kQB0OHrCng /tmp/tmp.UyHXv9vyG2 ++ return 0 + [[ -n '' ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''H8B?nghhpeQGrirkNyN'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -p'\''H8B?nghhpeQGrirkNyN'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 
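getSecretData, which just produced the root password above, is a two-step read: a Go template plucks one key out of the Secret's .data map, and base64 --decode undoes the encoding Kubernetes applies to every Secret value. As seen in the trace:

getSecretData() {
    local secretName=$1 dataKey=$2
    kubectl get "secrets/$secretName" "--template={{.data.$dataKey}}" | base64 --decode
}

root_pass=$(getSecretData my-cluster-secrets root)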
'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lrD64wPi53 +++ mktemp ++ local LAST_ERR=/tmp/tmp.i8cqEIi5Sy ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lrD64wPi53 ++ cat /tmp/tmp.i8cqEIi5Sy ++ rm /tmp/tmp.lrD64wPi53 /tmp/tmp.i8cqEIi5Sy ++ return 0 + client_pod=pxc-client-857d976497-khhbf + wait_pod pxc-client-857d976497-khhbf + local pod=pxc-client-857d976497-khhbf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-khhbf ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-857d976497-khhbf condition met waiting for pod/pxc-client-857d976497-khhbf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''H8B?nghhpeQGrirkNyN'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -p'\''H8B?nghhpeQGrirkNyN'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QRzRByEpNi +++ mktemp ++ local LAST_ERR=/tmp/tmp.2wm9eztSew ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QRzRByEpNi ++ cat /tmp/tmp.2wm9eztSew ++ rm /tmp/tmp.QRzRByEpNi /tmp/tmp.2wm9eztSew ++ return 0 + client_pod=pxc-client-857d976497-khhbf + wait_pod pxc-client-857d976497-khhbf + local pod=pxc-client-857d976497-khhbf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-khhbf ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-857d976497-khhbf condition met waiting for pod/pxc-client-857d976497-khhbf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in $(seq 0 $((size - 1))) + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''H8B?nghhpeQGrirkNyN'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''H8B?nghhpeQGrirkNyN'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''H8B?nghhpeQGrirkNyN'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''H8B?nghhpeQGrirkNyN'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xAW9HteGCl +++ mktemp ++ 
local LAST_ERR=/tmp/tmp.SfvhzdKWoJ ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xAW9HteGCl ++ cat /tmp/tmp.SfvhzdKWoJ ++ rm /tmp/tmp.xAW9HteGCl /tmp/tmp.SfvhzdKWoJ ++ return 0 + client_pod=pxc-client-857d976497-khhbf + wait_pod pxc-client-857d976497-khhbf + local pod=pxc-client-857d976497-khhbf + local max_retry=480 + local ns= ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo pxc-client-857d976497-khhbf ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-857d976497-khhbf condition met waiting for pod/pxc-client-857d976497-khhbf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.zNf2oNZjFh/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-1.sql /tmp/tmp.zNf2oNZjFh/select-1.sql + for i in $(seq 0 $((size - 1))) + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''H8B?nghhpeQGrirkNyN'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''H8B?nghhpeQGrirkNyN'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''H8B?nghhpeQGrirkNyN'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''H8B?nghhpeQGrirkNyN'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sBSECLWWzS +++ mktemp ++ local LAST_ERR=/tmp/tmp.P9bNTnzEkF ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sBSECLWWzS ++ cat /tmp/tmp.P9bNTnzEkF ++ rm /tmp/tmp.sBSECLWWzS /tmp/tmp.P9bNTnzEkF ++ return 0 + client_pod=pxc-client-857d976497-khhbf + wait_pod pxc-client-857d976497-khhbf + local pod=pxc-client-857d976497-khhbf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-khhbf ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-857d976497-khhbf condition met waiting for pod/pxc-client-857d976497-khhbf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.zNf2oNZjFh/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-1.sql /tmp/tmp.zNf2oNZjFh/select-1.sql + for i in $(seq 0 $((size - 1))) + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''H8B?nghhpeQGrirkNyN'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''H8B?nghhpeQGrirkNyN'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''H8B?nghhpeQGrirkNyN'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''H8B?nghhpeQGrirkNyN'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gyPLrNvp8P +++ mktemp ++ local LAST_ERR=/tmp/tmp.h3a6CeJHuZ ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gyPLrNvp8P ++ cat /tmp/tmp.h3a6CeJHuZ ++ rm /tmp/tmp.gyPLrNvp8P /tmp/tmp.h3a6CeJHuZ ++ return 0 + client_pod=pxc-client-857d976497-khhbf + wait_pod pxc-client-857d976497-khhbf + local pod=pxc-client-857d976497-khhbf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-khhbf ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-857d976497-khhbf condition met waiting for pod/pxc-client-857d976497-khhbf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
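Each compare_mysql_cmd round in this loop has the same shape: pick the expected fixture, preferring a version-suffixed variant (-57/-80/-84) when one exists, which is why the [[ ... =~ 5\.7 ]] and -f checks run first; execute the query; insist the capture is non-empty; and diff it against the fixture. A sketch, where IMAGE_PXC and RESULTS_DIR are hypothetical stand-ins for the suite's own variables (the per-run directory in this log is /tmp/tmp.zNf2oNZjFh):

compare_mysql_cmd() {
    local command_id=$1 command=$2 uri=$3
    local expected=e2e-tests/users/compare/$command_id.sql
    # use a 5.7-specific fixture when the image under test is 5.7 and the variant exists
    if [[ $IMAGE_PXC =~ 5\.7 && -f ${expected%.sql}-57.sql ]]; then
        expected=${expected%.sql}-57.sql
    fi
    run_mysql "$command" "$uri" >"$RESULTS_DIR/$command_id.sql"
    [ -s "$RESULTS_DIR/$command_id.sql" ]        # empty output fails the test early under set -e
    diff -u "$expected" "$RESULTS_DIR/$command_id.sql"
}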
-s /tmp/tmp.zNf2oNZjFh/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-1.sql /tmp/tmp.zNf2oNZjFh/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.J3xUAeNmVF +++ mktemp ++ local LAST_ERR=/tmp/tmp.bfttUNsXmi ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.J3xUAeNmVF ++ cat /tmp/tmp.bfttUNsXmi Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.J3xUAeNmVF /tmp/tmp.bfttUNsXmi ++ return 0 + '[' '' ']' + desc 'test missing passwords were created and present in internal secrets' + set +o xtrace ----------------------------------------------------------------------------------- test missing passwords were created and present in internal secrets ----------------------------------------------------------------------------------- + empty_pwds=() + wrong_pwds=() + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking root' Checking root ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.qHlX1iktoZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.j26Vqjz0eS ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qHlX1iktoZ ++ cat /tmp/tmp.j26Vqjz0eS ++ rm /tmp/tmp.qHlX1iktoZ /tmp/tmp.j26Vqjz0eS ++ return 0 + secret_pass='H8B?nghhpeQGrirkNyN' ++ getSecretData internal-some-name root ++ local secretName=internal-some-name ++ local dataKey=root ++ kubectl_bin get secrets/internal-some-name '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.diMivy3rJR +++ mktemp ++ local LAST_ERR=/tmp/tmp.kMdh3eImLK ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.diMivy3rJR ++ cat /tmp/tmp.kMdh3eImLK ++ rm /tmp/tmp.diMivy3rJR /tmp/tmp.kMdh3eImLK ++ return 0 + int_secret_pass='H8B?nghhpeQGrirkNyN' + [[ -z H8B?nghhpeQGrirkNyN ]] + [[ H8B?nghhpeQGrirkNyN != \H\8\B\?\n\g\h\h\p\e\Q\G\r\i\r\k\N\y\N ]] + [[ root != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ root ]] + [[ '' =~ root ]] + echo 'Running compare for root' Running compare for root + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''H8B?nghhpeQGrirkNyN'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''H8B?nghhpeQGrirkNyN'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f 
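A reading note for the per-user block that starts here: right-hand sides like \H\8\B\?\n\g\h\h... are not literal code. That is how bash xtrace prints the quoted operand of [[ $a != "$b" ]], so each check is a plain literal comparison between the password in the user-facing Secret and the operator's internal copy. The loop reduces to roughly the sketch below (empty_pwds and wrong_pwds collect failures; proxyadmin is routed to the ProxySQL admin check instead of the MySQL frontend):

empty_pwds=()
wrong_pwds=()
for user in root xtrabackup monitor proxyadmin operator replication; do
    echo "Checking $user"
    secret_pass=$(getSecretData my-cluster-secrets "$user")
    int_secret_pass=$(getSecretData internal-some-name "$user")
    if [[ -z $int_secret_pass ]]; then
        empty_pwds+=("$user")
    fi
    if [[ $secret_pass != "$int_secret_pass" ]]; then
        wrong_pwds+=("$user")
    fi
done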
/mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''H8B?nghhpeQGrirkNyN'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''H8B?nghhpeQGrirkNyN'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rjbx8ihcks +++ mktemp ++ local LAST_ERR=/tmp/tmp.NBI22aQJ6m ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rjbx8ihcks ++ cat /tmp/tmp.NBI22aQJ6m ++ rm /tmp/tmp.rjbx8ihcks /tmp/tmp.NBI22aQJ6m ++ return 0 + client_pod=pxc-client-857d976497-khhbf + wait_pod pxc-client-857d976497-khhbf + local pod=pxc-client-857d976497-khhbf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-khhbf ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-857d976497-khhbf condition met waiting for pod/pxc-client-857d976497-khhbf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.zNf2oNZjFh/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4.sql /tmp/tmp.zNf2oNZjFh/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking xtrabackup' Checking xtrabackup ++ getSecretData my-cluster-secrets xtrabackup ++ local secretName=my-cluster-secrets ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.K7IZ3a217Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.wspf4y8XGy ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.K7IZ3a217Y ++ cat /tmp/tmp.wspf4y8XGy ++ rm /tmp/tmp.K7IZ3a217Y /tmp/tmp.wspf4y8XGy ++ return 0 + secret_pass='LHl=]Q^q]0ra}nE}ZM' ++ getSecretData internal-some-name xtrabackup ++ local secretName=internal-some-name ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.G0fsbIFNLP +++ mktemp ++ local LAST_ERR=/tmp/tmp.h29YzHEHLQ ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.G0fsbIFNLP ++ cat /tmp/tmp.h29YzHEHLQ ++ rm /tmp/tmp.G0fsbIFNLP /tmp/tmp.h29YzHEHLQ ++ return 0 + int_secret_pass='LHl=]Q^q]0ra}nE}ZM' + [[ -z LHl=]Q^q]0ra}nE}ZM ]] + [[ LHl=]Q^q]0ra}nE}ZM != \L\H\l\=\]\Q\^\q\]\0\r\a\}\n\E\}\Z\M ]] + [[ xtrabackup != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ xtrabackup ]] + [[ '' =~ xtrabackup ]] + echo 'Running compare for xtrabackup' Running compare for xtrabackup + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''LHl=]Q^q]0ra}nE}ZM'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''LHl=]Q^q]0ra}nE}ZM'\''' + local postfix= + local 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\''LHl=]Q^q]0ra}nE}ZM'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\''LHl=]Q^q]0ra}nE}ZM'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NW4pxJccUD +++ mktemp ++ local LAST_ERR=/tmp/tmp.iVrPZci3Rd ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NW4pxJccUD ++ cat /tmp/tmp.iVrPZci3Rd ++ rm /tmp/tmp.NW4pxJccUD /tmp/tmp.iVrPZci3Rd ++ return 0 + client_pod=pxc-client-857d976497-khhbf + wait_pod pxc-client-857d976497-khhbf + local pod=pxc-client-857d976497-khhbf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-khhbf ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-857d976497-khhbf condition met waiting for pod/pxc-client-857d976497-khhbf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.zNf2oNZjFh/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4.sql /tmp/tmp.zNf2oNZjFh/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking monitor' Checking monitor ++ getSecretData my-cluster-secrets monitor ++ local secretName=my-cluster-secrets ++ local dataKey=monitor ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.7NAKOsIE1v +++ mktemp ++ local LAST_ERR=/tmp/tmp.Q3FdQda8Gi ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7NAKOsIE1v ++ cat /tmp/tmp.Q3FdQda8Gi ++ rm /tmp/tmp.7NAKOsIE1v /tmp/tmp.Q3FdQda8Gi ++ return 0 + secret_pass=monitor_password ++ getSecretData internal-some-name monitor ++ local secretName=internal-some-name ++ local dataKey=monitor ++ kubectl_bin get secrets/internal-some-name '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.tpHOuZaNW0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.d14q8S7Tpk ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tpHOuZaNW0 ++ cat /tmp/tmp.d14q8S7Tpk ++ rm /tmp/tmp.tpHOuZaNW0 /tmp/tmp.d14q8S7Tpk ++ return 0 + int_secret_pass=monitor_password + [[ -z monitor_password ]] + [[ monitor_password != \m\o\n\i\t\o\r\_\p\a\s\s\w\o\r\d ]] + [[ monitor != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ monitor ]] + [[ '' =~ monitor ]] + echo 'Running compare for monitor' Running compare for monitor + compare_mysql_cmd select-4 'SHOW TABLES;' 
'-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NGjDWSC73b +++ mktemp ++ local LAST_ERR=/tmp/tmp.Onw2qhIHNr ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NGjDWSC73b ++ cat /tmp/tmp.Onw2qhIHNr ++ rm /tmp/tmp.NGjDWSC73b /tmp/tmp.Onw2qhIHNr ++ return 0 + client_pod=pxc-client-857d976497-khhbf + wait_pod pxc-client-857d976497-khhbf + local pod=pxc-client-857d976497-khhbf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-khhbf ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-857d976497-khhbf condition met waiting for pod/pxc-client-857d976497-khhbf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.zNf2oNZjFh/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4.sql /tmp/tmp.zNf2oNZjFh/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking proxyadmin' Checking proxyadmin ++ getSecretData my-cluster-secrets proxyadmin ++ local secretName=my-cluster-secrets ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.kjUm0o9pVO +++ mktemp ++ local LAST_ERR=/tmp/tmp.HTgAKh6N1J ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kjUm0o9pVO ++ cat /tmp/tmp.HTgAKh6N1J ++ rm /tmp/tmp.kjUm0o9pVO /tmp/tmp.HTgAKh6N1J ++ return 0 + secret_pass='QF(*swBKXETG~yqK' ++ getSecretData internal-some-name proxyadmin ++ local secretName=internal-some-name ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.mbxpmhNuDJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.1UmrzaWVk4 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mbxpmhNuDJ ++ cat /tmp/tmp.1UmrzaWVk4 ++ rm /tmp/tmp.mbxpmhNuDJ /tmp/tmp.1UmrzaWVk4 ++ return 0 + int_secret_pass='QF(*swBKXETG~yqK' + [[ -z QF(*swBKXETG~yqK ]] + [[ QF(*swBKXETG~yqK != \Q\F\(\*\s\w\B\K\X\E\T\G\~\y\q\K ]] + [[ proxyadmin != \p\r\o\x\y\a\d\m\i\n ]] + [[ proxyadmin == \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ proxyadmin ]] + [[ '' =~ proxyadmin ]] + echo 'Running compare for proxyadmin' Running compare for proxyadmin + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''QF(*swBKXETG~yqK'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''QF(*swBKXETG~yqK'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''QF(*swBKXETG~yqK'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''QF(*swBKXETG~yqK'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace egrep: warning: egrep is obsolescent; using grep -E + '[' '!' 
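proxyadmin is the one account checked differently: it exists only inside ProxySQL, so compare_mysql_cmd_local runs the query from within the proxysql container against the admin interface on 127.0.0.1:6032 rather than through the MySQL frontend. run_mysql_local hides its body behind set +o xtrace; the sketch below is an assumption consistent with the arguments it receives, not the verbatim helper:

run_mysql_local() {
    local command=$1 uri=$2 pod=$3 container_name=$4
    kubectl exec "$pod" ${container_name:+-c "$container_name"} -- \
        bash -c "mysql -sN $uri -e \"$command\""
}

# the proxyadmin check above, with the password in a variable:
run_mysql_local 'SHOW TABLES;' "-h127.0.0.1 -P6032 -uproxyadmin -p$proxyadmin_pass" some-name-proxysql-0 proxysql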
-s /tmp/tmp.zNf2oNZjFh/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-2.sql /tmp/tmp.zNf2oNZjFh/select-2.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking operator' Checking operator ++ getSecretData my-cluster-secrets operator ++ local secretName=my-cluster-secrets ++ local dataKey=operator ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.t7lNbLS2q5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.uaZpxZFRza ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.t7lNbLS2q5 ++ cat /tmp/tmp.uaZpxZFRza ++ rm /tmp/tmp.t7lNbLS2q5 /tmp/tmp.uaZpxZFRza ++ return 0 + secret_pass='n<(63oVYBUpTN7E_' ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.u9ziPLkhfR +++ mktemp ++ local LAST_ERR=/tmp/tmp.XBjzYLAKRg ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.u9ziPLkhfR ++ cat /tmp/tmp.XBjzYLAKRg ++ rm /tmp/tmp.u9ziPLkhfR /tmp/tmp.XBjzYLAKRg ++ return 0 + int_secret_pass='n<(63oVYBUpTN7E_' + [[ -z n<(63oVYBUpTN7E_ ]] + [[ n<(63oVYBUpTN7E_ != \n\<\(\6\3\o\V\Y\B\U\p\T\N\7\E\_ ]] + [[ operator != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ operator ]] + [[ '' =~ operator ]] + echo 'Running compare for operator' Running compare for operator + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''n<(63oVYBUpTN7E_'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''n<(63oVYBUpTN7E_'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''n<(63oVYBUpTN7E_'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''n<(63oVYBUpTN7E_'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.md18LySs9L +++ mktemp ++ local LAST_ERR=/tmp/tmp.3ibfyLbynL ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.md18LySs9L ++ cat /tmp/tmp.3ibfyLbynL ++ rm /tmp/tmp.md18LySs9L /tmp/tmp.3ibfyLbynL ++ return 0 + client_pod=pxc-client-857d976497-khhbf + wait_pod pxc-client-857d976497-khhbf + local pod=pxc-client-857d976497-khhbf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-khhbf ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + 
local container= + set +o xtrace pod/pxc-client-857d976497-khhbf condition met waiting for pod/pxc-client-857d976497-khhbf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.zNf2oNZjFh/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4.sql /tmp/tmp.zNf2oNZjFh/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking replication' Checking replication ++ getSecretData my-cluster-secrets replication ++ local secretName=my-cluster-secrets ++ local dataKey=replication ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.ghoyKmztOA +++ mktemp ++ local LAST_ERR=/tmp/tmp.J35b18FyED ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ghoyKmztOA ++ cat /tmp/tmp.J35b18FyED ++ rm /tmp/tmp.ghoyKmztOA /tmp/tmp.J35b18FyED ++ return 0 + secret_pass='D!i]S.*V^,8!=t?DFi' ++ getSecretData internal-some-name replication ++ local secretName=internal-some-name ++ local dataKey=replication ++ kubectl_bin get secrets/internal-some-name '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.6oeyzzi2Yz +++ mktemp ++ local LAST_ERR=/tmp/tmp.7ecJli0JOS ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6oeyzzi2Yz ++ cat /tmp/tmp.7ecJli0JOS ++ rm /tmp/tmp.6oeyzzi2Yz /tmp/tmp.7ecJli0JOS ++ return 0 + int_secret_pass='D!i]S.*V^,8!=t?DFi' + [[ -z D!i]S.*V^,8!=t?DFi ]] + [[ D!i]S.*V^,8!=t?DFi != \D\!\i\]\S\.\*\V\^\,\8\!\=\t\?\D\F\i ]] + [[ replication != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ replication ]] + [[ '' =~ replication ]] + echo 'Running compare for replication' Running compare for replication + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''D!i]S.*V^,8!=t?DFi'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''D!i]S.*V^,8!=t?DFi'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''D!i]S.*V^,8!=t?DFi'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''D!i]S.*V^,8!=t?DFi'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kzemKk9kgk +++ mktemp ++ local LAST_ERR=/tmp/tmp.aYqbmITPxp ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kzemKk9kgk ++ cat /tmp/tmp.aYqbmITPxp ++ rm /tmp/tmp.kzemKk9kgk /tmp/tmp.aYqbmITPxp ++ return 0 + 
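# annotation: nearly every kubectl call in this log runs through the suite's
# kubectl_bin wrapper, which is why each command is bracketed by mktemp files
# (LAST_OUT/LAST_ERR) and a three-attempt loop (seq 0 2). A sketch inferred from
# the expanded trace; the failure path (the sleep between attempts) is an
# assumption, since every call in this log succeeds on the first try:
kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ $exit_status != 0 ]; then
            sleep 1   # assumed backoff; not observable in this log
        else
            break
        fi
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR" >&2
    rm "$LAST_OUT" "$LAST_ERR"
    return $exit_status
}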
client_pod=pxc-client-857d976497-khhbf + wait_pod pxc-client-857d976497-khhbf + local pod=pxc-client-857d976497-khhbf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-khhbf ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-857d976497-khhbf condition met waiting for pod/pxc-client-857d976497-khhbf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.zNf2oNZjFh/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4.sql /tmp/tmp.zNf2oNZjFh/select-4.sql + [[ -n '' ]] + [[ -n '' ]] + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.9yIiSLw8bM ++ mktemp + local LAST_ERR=/tmp/tmp.SIdVXnYLK5 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.9yIiSLw8bM secret/my-cluster-secrets patched + cat /tmp/tmp.SIdVXnYLK5 + rm /tmp/tmp.9yIiSLw8bM /tmp/tmp.SIdVXnYLK5 + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IwyMGv96oj +++ mktemp ++ local LAST_ERR=/tmp/tmp.2Ud3Spe3Ph ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IwyMGv96oj ++ cat /tmp/tmp.2Ud3Spe3Ph ++ rm /tmp/tmp.IwyMGv96oj /tmp/tmp.2Ud3Spe3Ph ++ return 0 + client_pod=pxc-client-857d976497-khhbf + wait_pod pxc-client-857d976497-khhbf + local pod=pxc-client-857d976497-khhbf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-khhbf ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-857d976497-khhbf condition met waiting for pod/pxc-client-857d976497-khhbf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + 
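# annotation: "test root" above rotates the root password by patching the
# Secret directly; the payload dGVzdC1wYXNzd29yZA== is base64 for
# "test-password", which is why the follow-up login uses -p'test-password'.
# Sketch of patch_secret as expanded in the trace:
patch_secret() {
    local secret="$1" key="$2" value="$3"   # value must already be base64-encoded
    kubectl patch secret "$secret" -p="{\"data\":{\"$key\": \"$value\"}}"
}
# e.g. patch_secret my-cluster-secrets root "$(echo -n 'test-password' | base64)"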
set +o xtrace + '[' '!' -s /tmp/tmp.zNf2oNZjFh/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4.sql /tmp/tmp.zNf2oNZjFh/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.xxAA2mKYjk ++ mktemp + local LAST_ERR=/tmp/tmp.Mcp48XN7i0 + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xxAA2mKYjk perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.Mcp48XN7i0 + rm /tmp/tmp.xxAA2mKYjk /tmp/tmp.Mcp48XN7i0 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.x1RPpjhMbr +++ mktemp ++ local LAST_ERR=/tmp/tmp.UBtsX9psGP ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.x1RPpjhMbr ++ cat /tmp/tmp.UBtsX9psGP ++ rm /tmp/tmp.x1RPpjhMbr /tmp/tmp.UBtsX9psGP ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rxf6kbJB0Z +++ mktemp ++ local LAST_ERR=/tmp/tmp.H71T8L5jVX ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rxf6kbJB0Z ++ cat /tmp/tmp.H71T8L5jVX ++ rm /tmp/tmp.rxf6kbJB0Z /tmp/tmp.H71T8L5jVX ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.a3kLxI86Au ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ZtZuTQQsuJ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.a3kLxI86Au +++++ cat /tmp/tmp.ZtZuTQQsuJ +++++ rm /tmp/tmp.a3kLxI86Au /tmp/tmp.ZtZuTQQsuJ +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.MAMldEdNbq ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.L52zBm23qF +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.MAMldEdNbq +++++ cat 
/tmp/tmp.L52zBm23qF +++++ rm /tmp/tmp.MAMldEdNbq /tmp/tmp.L52zBm23qF +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KSFOC09OGM +++ mktemp ++ local LAST_ERR=/tmp/tmp.6rq9u5hnrA ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KSFOC09OGM ++ cat /tmp/tmp.6rq9u5hnrA ++ rm /tmp/tmp.KSFOC09OGM /tmp/tmp.6rq9u5hnrA ++ return 0 + [[ 3 == \3 ]] + echo + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.y1Kped3SzE ++ mktemp + local LAST_ERR=/tmp/tmp.PRpyYzpSiw + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.y1Kped3SzE secret/my-cluster-secrets patched + cat /tmp/tmp.PRpyYzpSiw + rm /tmp/tmp.y1Kped3SzE /tmp/tmp.PRpyYzpSiw + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BBeIoqGreq +++ mktemp ++ local LAST_ERR=/tmp/tmp.LXCziN8D30 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BBeIoqGreq ++ cat /tmp/tmp.LXCziN8D30 ++ rm /tmp/tmp.BBeIoqGreq /tmp/tmp.LXCziN8D30 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.huHn4P69hC +++ mktemp ++ local LAST_ERR=/tmp/tmp.qcQA3UQ1NJ ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.huHn4P69hC ++ cat /tmp/tmp.qcQA3UQ1NJ ++ rm /tmp/tmp.huHn4P69hC /tmp/tmp.qcQA3UQ1NJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ULiy16tS3x +++ mktemp ++ local LAST_ERR=/tmp/tmp.hlR0pzY08y ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ULiy16tS3x ++ cat /tmp/tmp.hlR0pzY08y ++ rm /tmp/tmp.ULiy16tS3x /tmp/tmp.hlR0pzY08y ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
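# annotation: wait_cluster_consistency, replayed above after every change, polls
# the custom resource roughly every 5s (up to 300 attempts, printing a dot per
# try) until .status.state is "ready" and the ready replica counts match the
# expected PXC and proxy sizes. A condensed sketch; the real helper also
# resolves the proxy engine (see the get_proxy note further down) before
# deciding which ready-count field to compare:
wait_cluster_consistency() {
    local cluster="$1" cluster_size="$2" proxy_size="$3"
    local i=0 max=300
    sleep 7
    while true; do
        local state pxc_ready proxy_ready
        state=$(kubectl get pxc "$cluster" -o 'jsonpath={.status.state}')
        pxc_ready=$(kubectl get pxc "$cluster" -o 'jsonpath={.status.pxc.ready}')
        proxy_ready=$(kubectl get pxc "$cluster" -o 'jsonpath={.status.proxysql.ready}')
        if [[ $state == "ready" && $pxc_ready == "$cluster_size" && $proxy_ready == "$proxy_size" ]]; then
            break
        fi
        [[ $i -ge $max ]] && { echo "timeout waiting for pxc/$cluster"; return 1; }
        echo -n .
        sleep 5
        i=$((i + 1))
    done
}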
.+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7HQTtrcSyr +++ mktemp ++ local LAST_ERR=/tmp/tmp.KpyVRemnqa ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7HQTtrcSyr ++ cat /tmp/tmp.KpyVRemnqa ++ rm /tmp/tmp.7HQTtrcSyr /tmp/tmp.KpyVRemnqa ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DV346mnrLT +++ mktemp ++ local LAST_ERR=/tmp/tmp.wWU0DAkQG3 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DV346mnrLT ++ cat /tmp/tmp.wWU0DAkQG3 ++ rm /tmp/tmp.DV346mnrLT /tmp/tmp.wWU0DAkQG3 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.41AX3pWPK4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.VNylpqGBUe ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.41AX3pWPK4 ++ cat /tmp/tmp.VNylpqGBUe ++ rm /tmp/tmp.41AX3pWPK4 /tmp/tmp.VNylpqGBUe ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.RGLunxqiyY ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.WAtn2bdxjs +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.RGLunxqiyY +++++ cat /tmp/tmp.WAtn2bdxjs +++++ rm /tmp/tmp.RGLunxqiyY /tmp/tmp.WAtn2bdxjs +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Ysc3ygrWVm ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.0aFnIs4Lqs +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Ysc3ygrWVm +++++ cat /tmp/tmp.0aFnIs4Lqs +++++ rm /tmp/tmp.Ysc3ygrWVm /tmp/tmp.0aFnIs4Lqs +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Y7F3OiUs2p +++ mktemp ++ local LAST_ERR=/tmp/tmp.LsO72GmXhm ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Y7F3OiUs2p ++ cat /tmp/tmp.LsO72GmXhm ++ rm /tmp/tmp.Y7F3OiUs2p /tmp/tmp.LsO72GmXhm ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + 
local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace egrep: warning: egrep is obsolescent; using grep -E + '[' '!' -s /tmp/tmp.zNf2oNZjFh/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-2.sql /tmp/tmp.zNf2oNZjFh/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace egrep: warning: egrep is obsolescent; using grep -E + '[' '!' -s /tmp/tmp.zNf2oNZjFh/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-2.sql /tmp/tmp.zNf2oNZjFh/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace egrep: warning: egrep is obsolescent; using grep -E + '[' '!' 
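# annotation: the three compare_mysql_cmd_local calls above validate the new
# proxyadmin password against each ProxySQL pod's admin interface (port 6032 on
# localhost, which only answers inside the container, hence kubectl exec). A
# sketch of the helpers; the exact mysql invocation and the $test_dir/$tmp_dir
# names are assumptions, the argument order is taken from the trace:
run_mysql_local() {
    local command="$1" uri="$2" pod="$3" container_name="$4"
    kubectl exec "$pod" -c "$container_name" -- \
        bash -c "printf '%s\n' \"$command\" | mysql -sN $uri"
}

compare_mysql_cmd_local() {
    local command_id="$1" command="$2" uri="$3" pod="$4" postfix="$5" container_name="$6"
    local expected="${test_dir}/compare/${command_id}${postfix}.sql"
    run_mysql_local "$command" "$uri" "$pod" "$container_name" >"${tmp_dir}/${command_id}.sql"
    diff -u "$expected" "${tmp_dir}/${command_id}.sql"   # an empty diff is a pass
}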
-s /tmp/tmp.zNf2oNZjFh/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-2.sql /tmp/tmp.zNf2oNZjFh/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.0pi6EehDFr ++ mktemp + local LAST_ERR=/tmp/tmp.wyuy6Tx6ii + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0pi6EehDFr perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.wyuy6Tx6ii + rm /tmp/tmp.0pi6EehDFr /tmp/tmp.wyuy6Tx6ii + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.t7jhQamF9v ++ mktemp + local LAST_ERR=/tmp/tmp.M75tfTebII + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.t7jhQamF9v secret/my-cluster-secrets patched + cat /tmp/tmp.M75tfTebII + rm /tmp/tmp.t7jhQamF9v /tmp/tmp.M75tfTebII + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BublnOUyu8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.KaGD0YRgzJ ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BublnOUyu8 ++ cat /tmp/tmp.KaGD0YRgzJ ++ rm /tmp/tmp.BublnOUyu8 /tmp/tmp.KaGD0YRgzJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.flQPoecJf1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.pvbkPGXTUC ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.flQPoecJf1 ++ cat /tmp/tmp.pvbkPGXTUC ++ rm /tmp/tmp.flQPoecJf1 /tmp/tmp.pvbkPGXTUC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kg4wcphWAf +++ mktemp ++ local LAST_ERR=/tmp/tmp.u1sX4A5V5I ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kg4wcphWAf ++ cat /tmp/tmp.u1sX4A5V5I ++ rm /tmp/tmp.kg4wcphWAf /tmp/tmp.u1sX4A5V5I ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rLqRFTVlLe +++ mktemp ++ local LAST_ERR=/tmp/tmp.4ihJdC5ptY ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rLqRFTVlLe ++ cat /tmp/tmp.4ihJdC5ptY ++ rm /tmp/tmp.rLqRFTVlLe /tmp/tmp.4ihJdC5ptY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Da6TikM7qB +++ mktemp ++ local LAST_ERR=/tmp/tmp.9irLeYRMSD ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Da6TikM7qB ++ cat /tmp/tmp.9irLeYRMSD ++ rm /tmp/tmp.Da6TikM7qB /tmp/tmp.9irLeYRMSD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XWv1Xty7jW +++ mktemp ++ local LAST_ERR=/tmp/tmp.KEMir4UQdI ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XWv1Xty7jW ++ cat /tmp/tmp.KEMir4UQdI ++ rm /tmp/tmp.XWv1Xty7jW /tmp/tmp.KEMir4UQdI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lqmERZgVzr +++ mktemp ++ local LAST_ERR=/tmp/tmp.MPVS3e4g3Z ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lqmERZgVzr ++ cat /tmp/tmp.MPVS3e4g3Z ++ rm /tmp/tmp.lqmERZgVzr /tmp/tmp.MPVS3e4g3Z ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.g9mF40TbZI +++ mktemp ++ local LAST_ERR=/tmp/tmp.yfIV9mGKeH ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.g9mF40TbZI ++ cat /tmp/tmp.yfIV9mGKeH ++ rm /tmp/tmp.g9mF40TbZI /tmp/tmp.yfIV9mGKeH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.C0ciFvT3PJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.IiJS9mDwxD ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.C0ciFvT3PJ ++ cat /tmp/tmp.IiJS9mDwxD ++ rm /tmp/tmp.C0ciFvT3PJ /tmp/tmp.IiJS9mDwxD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XYuiioOdtA +++ mktemp ++ local LAST_ERR=/tmp/tmp.cbsWRaOnsd ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XYuiioOdtA ++ cat /tmp/tmp.cbsWRaOnsd ++ rm /tmp/tmp.XYuiioOdtA /tmp/tmp.cbsWRaOnsd ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AabvnNX9Yp +++ mktemp ++ local LAST_ERR=/tmp/tmp.ppQimCB9u9 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AabvnNX9Yp ++ cat /tmp/tmp.ppQimCB9u9 ++ rm /tmp/tmp.AabvnNX9Yp /tmp/tmp.ppQimCB9u9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9rz7dXlFIn +++ mktemp ++ local LAST_ERR=/tmp/tmp.rtEskAdR29 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9rz7dXlFIn ++ cat /tmp/tmp.rtEskAdR29 ++ rm /tmp/tmp.9rz7dXlFIn /tmp/tmp.rtEskAdR29 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3zS5Ik0gB3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.zpcSCkUR91 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3zS5Ik0gB3 ++ cat /tmp/tmp.zpcSCkUR91 ++ rm /tmp/tmp.3zS5Ik0gB3 /tmp/tmp.zpcSCkUR91 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.v5BXlfhsEh +++ mktemp ++ local LAST_ERR=/tmp/tmp.2kgBiuKhjR ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.v5BXlfhsEh ++ cat /tmp/tmp.2kgBiuKhjR ++ rm /tmp/tmp.v5BXlfhsEh /tmp/tmp.2kgBiuKhjR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.r78kW1H5YB +++ mktemp ++ local LAST_ERR=/tmp/tmp.CDU0oJYlwG ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.r78kW1H5YB ++ cat /tmp/tmp.CDU0oJYlwG ++ rm /tmp/tmp.r78kW1H5YB /tmp/tmp.CDU0oJYlwG ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kiVpdpsbqy +++ mktemp ++ local LAST_ERR=/tmp/tmp.yjrMEnxZHU ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kiVpdpsbqy ++ cat /tmp/tmp.yjrMEnxZHU ++ rm /tmp/tmp.kiVpdpsbqy /tmp/tmp.yjrMEnxZHU ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aKYmczeqIV +++ mktemp ++ local LAST_ERR=/tmp/tmp.LMOQVR6xJ9 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aKYmczeqIV ++ cat /tmp/tmp.LMOQVR6xJ9 ++ rm /tmp/tmp.aKYmczeqIV /tmp/tmp.LMOQVR6xJ9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IIpG0mUvFH +++ mktemp ++ local LAST_ERR=/tmp/tmp.neuVnjY68d ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IIpG0mUvFH ++ cat /tmp/tmp.neuVnjY68d ++ rm /tmp/tmp.IIpG0mUvFH /tmp/tmp.neuVnjY68d ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eIyzoxpPCk +++ mktemp ++ local LAST_ERR=/tmp/tmp.nGC2Kq4Y0k ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eIyzoxpPCk ++ cat /tmp/tmp.nGC2Kq4Y0k ++ rm /tmp/tmp.eIyzoxpPCk /tmp/tmp.nGC2Kq4Y0k ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JO8PO9o0re +++ mktemp ++ local LAST_ERR=/tmp/tmp.BjDudqxYf8 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JO8PO9o0re ++ cat /tmp/tmp.BjDudqxYf8 ++ rm /tmp/tmp.JO8PO9o0re /tmp/tmp.BjDudqxYf8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vDhiEvSWYP +++ mktemp ++ local LAST_ERR=/tmp/tmp.zQlTgSxryR ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vDhiEvSWYP ++ cat /tmp/tmp.zQlTgSxryR ++ rm /tmp/tmp.vDhiEvSWYP /tmp/tmp.zQlTgSxryR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uqGsRa5wTT +++ mktemp ++ local LAST_ERR=/tmp/tmp.z1NwizLC1E ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uqGsRa5wTT ++ cat /tmp/tmp.z1NwizLC1E ++ rm /tmp/tmp.uqGsRa5wTT /tmp/tmp.z1NwizLC1E ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XfVi82AoqP +++ mktemp ++ local LAST_ERR=/tmp/tmp.n3aVztpv0m ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XfVi82AoqP ++ cat /tmp/tmp.n3aVztpv0m ++ rm /tmp/tmp.XfVi82AoqP /tmp/tmp.n3aVztpv0m ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 22 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DQfpfkwVFU +++ mktemp ++ local LAST_ERR=/tmp/tmp.MknVPt0Ndf ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DQfpfkwVFU ++ cat /tmp/tmp.MknVPt0Ndf ++ rm /tmp/tmp.DQfpfkwVFU /tmp/tmp.MknVPt0Ndf ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3imi7eHiEm +++ mktemp ++ local LAST_ERR=/tmp/tmp.6yjaVxx2oJ ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3imi7eHiEm ++ cat /tmp/tmp.6yjaVxx2oJ ++ rm /tmp/tmp.3imi7eHiEm /tmp/tmp.6yjaVxx2oJ ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.2KHhDopjML ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.I6cXey0YZW +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.2KHhDopjML +++++ cat /tmp/tmp.I6cXey0YZW +++++ rm /tmp/tmp.2KHhDopjML /tmp/tmp.I6cXey0YZW +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.f7YvAwtEKs ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.6LfKX34Hm5 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in 
$(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.f7YvAwtEKs +++++ cat /tmp/tmp.6LfKX34Hm5 +++++ rm /tmp/tmp.f7YvAwtEKs /tmp/tmp.6LfKX34Hm5 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qZYEVUFIFC +++ mktemp ++ local LAST_ERR=/tmp/tmp.4qe7bxG9D5 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qZYEVUFIFC ++ cat /tmp/tmp.4qe7bxG9D5 ++ rm /tmp/tmp.qZYEVUFIFC /tmp/tmp.4qe7bxG9D5 ++ return 0 + [[ 2 == \2 ]] + echo + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace egrep: warning: egrep is obsolescent; using grep -E + '[' '!' 
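# annotation: the deeply nested +++++ traces above are get_proxy_engine
# resolving which proxy fronts the cluster: HAProxy is checked first, then
# ProxySQL. Since .spec.proxysql.enabled is true here, every consistency check
# compares .status.proxysql.ready. Sketch of the resolution logic; only the
# ProxySQL branch is exercised in this log, the HAProxy name is an assumption:
get_proxy() {
    local target_cluster="$1"
    if [[ $(kubectl get pxc "$target_cluster" -o 'jsonpath={.spec.haproxy.enabled}') == "true" ]]; then
        echo "${target_cluster}-haproxy"
    elif [[ $(kubectl get pxc "$target_cluster" -o 'jsonpath={.spec.proxysql.enabled}') == "true" ]]; then
        echo "${target_cluster}-proxysql"
    fi
}

get_proxy_engine() {
    local cluster_proxy
    cluster_proxy=$(get_proxy "$1")
    echo "${cluster_proxy##*-}"   # "some-name-proxysql" -> "proxysql"
}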
-s /tmp/tmp.zNf2oNZjFh/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-3.sql /tmp/tmp.zNf2oNZjFh/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.FdSw6eycJx ++ mktemp + local LAST_ERR=/tmp/tmp.gVwAOkNj2z + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.FdSw6eycJx secret/my-cluster-secrets patched + cat /tmp/tmp.gVwAOkNj2z + rm /tmp/tmp.FdSw6eycJx /tmp/tmp.gVwAOkNj2z + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.nSBEr6knvC +++ mktemp ++ local LAST_ERR=/tmp/tmp.4uZeTFifJx ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nSBEr6knvC ++ cat /tmp/tmp.4uZeTFifJx ++ rm /tmp/tmp.nSBEr6knvC /tmp/tmp.4uZeTFifJx ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + echo 'Skipping dual password feature doesn'\''t work for 5.7. PXC 5.7 doesn'\''t support it!' Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it! + return + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jQnBz8S0vf +++ mktemp ++ local LAST_ERR=/tmp/tmp.RwQWqxxAIw ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jQnBz8S0vf ++ cat /tmp/tmp.RwQWqxxAIw ++ rm /tmp/tmp.jQnBz8S0vf /tmp/tmp.RwQWqxxAIw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
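# annotation: wait_for_password_propagation above bails out immediately on 5.7
# images: MySQL's dual-password feature (RETAIN CURRENT PASSWORD), which keeps
# the old monitor password valid while the new one propagates, only exists in
# MySQL/PXC 8.0.14+, so on 5.7 the suite falls back to the plain
# wait_cluster_consistency polling that follows. Sketch of the guard;
# $IMAGE_PXC is an assumed name for the image variable matched in the trace:
wait_for_password_propagation() {
    local secret="$1" user="$2"
    local root_pass
    root_pass=$(getSecretData "$secret" root)
    if [[ $IMAGE_PXC =~ 5\.7 ]]; then
        echo "Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it!"
        return
    fi
    # on 8.0+ the helper would poll here until the new password is accepted
}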
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZaweDXncyn +++ mktemp ++ local LAST_ERR=/tmp/tmp.gWnMUz99Ir ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZaweDXncyn ++ cat /tmp/tmp.gWnMUz99Ir ++ rm /tmp/tmp.ZaweDXncyn /tmp/tmp.gWnMUz99Ir ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KrSVWz37kX +++ mktemp ++ local LAST_ERR=/tmp/tmp.rKqtPe2GSm ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KrSVWz37kX ++ cat /tmp/tmp.rKqtPe2GSm ++ rm /tmp/tmp.KrSVWz37kX /tmp/tmp.rKqtPe2GSm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bOItCJ1rFF +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZynZTMmweS ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bOItCJ1rFF ++ cat /tmp/tmp.ZynZTMmweS ++ rm /tmp/tmp.bOItCJ1rFF /tmp/tmp.ZynZTMmweS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LefNdrKISS +++ mktemp ++ local LAST_ERR=/tmp/tmp.Bebscx7Mq9 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LefNdrKISS ++ cat /tmp/tmp.Bebscx7Mq9 ++ rm /tmp/tmp.LefNdrKISS /tmp/tmp.Bebscx7Mq9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sG1HSqm0Wv +++ mktemp ++ local LAST_ERR=/tmp/tmp.EwBzzc6Cdj ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sG1HSqm0Wv ++ cat /tmp/tmp.EwBzzc6Cdj ++ rm /tmp/tmp.sG1HSqm0Wv /tmp/tmp.EwBzzc6Cdj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4EWeEs1EvC +++ mktemp ++ local LAST_ERR=/tmp/tmp.QlVjTNA4dv ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4EWeEs1EvC ++ cat /tmp/tmp.QlVjTNA4dv ++ rm /tmp/tmp.4EWeEs1EvC /tmp/tmp.QlVjTNA4dv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Gk4t4p71sf +++ mktemp ++ local LAST_ERR=/tmp/tmp.AccRwbpGwK ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Gk4t4p71sf ++ cat /tmp/tmp.AccRwbpGwK ++ rm /tmp/tmp.Gk4t4p71sf /tmp/tmp.AccRwbpGwK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6oBauWJ32a +++ mktemp ++ local LAST_ERR=/tmp/tmp.IAQ9ApL8r4 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6oBauWJ32a ++ cat /tmp/tmp.IAQ9ApL8r4 ++ rm /tmp/tmp.6oBauWJ32a /tmp/tmp.IAQ9ApL8r4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.slnJ9YlaEd +++ mktemp ++ local LAST_ERR=/tmp/tmp.j3gWIMrJSw ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.slnJ9YlaEd ++ cat /tmp/tmp.j3gWIMrJSw ++ rm /tmp/tmp.slnJ9YlaEd /tmp/tmp.j3gWIMrJSw ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qObGLgzFxr +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZfSSD3ejGY ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qObGLgzFxr ++ cat /tmp/tmp.ZfSSD3ejGY ++ rm /tmp/tmp.qObGLgzFxr /tmp/tmp.ZfSSD3ejGY ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.6U5Quz2EML ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.bqUfB0C4OW +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.6U5Quz2EML +++++ cat /tmp/tmp.bqUfB0C4OW +++++ rm /tmp/tmp.6U5Quz2EML /tmp/tmp.bqUfB0C4OW +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.B0hgm8y3LR ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.fKavuO7yLR +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.B0hgm8y3LR +++++ cat /tmp/tmp.fKavuO7yLR +++++ rm /tmp/tmp.B0hgm8y3LR /tmp/tmp.fKavuO7yLR +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7Ut1KyoMIz +++ 
mktemp ++ local LAST_ERR=/tmp/tmp.OrwzjNLKM5 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7Ut1KyoMIz ++ cat /tmp/tmp.OrwzjNLKM5 ++ rm /tmp/tmp.7Ut1KyoMIz /tmp/tmp.OrwzjNLKM5 ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fua88TN9ra +++ mktemp ++ local LAST_ERR=/tmp/tmp.Y3DTE6AaAU ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fua88TN9ra ++ cat /tmp/tmp.Y3DTE6AaAU ++ rm /tmp/tmp.fua88TN9ra /tmp/tmp.Y3DTE6AaAU ++ return 0 + client_pod=pxc-client-857d976497-khhbf + wait_pod pxc-client-857d976497-khhbf + local pod=pxc-client-857d976497-khhbf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-khhbf ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-857d976497-khhbf condition met waiting for pod/pxc-client-857d976497-khhbf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
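# annotation: compare_mysql_cmd (used for the monitor check above) differs from
# the _local variant: it routes the query through the shared pxc-client pod
# instead of exec'ing inside a cluster pod, and it first looks for a
# version-specific expected file (select-4-57.sql for 5.7 images; none exists
# here, so the generic select-4.sql is used). Sketch; exec details and the
# $IMAGE_PXC/$test_dir/$tmp_dir names are assumptions:
get_client_pod() {
    kubectl get pods --selector=name=pxc-client \
        -o 'jsonpath={.items[].metadata.name}'
}

run_mysql() {
    local command="$1" uri="$2"
    local client_pod
    client_pod=$(get_client_pod)
    kubectl exec "$client_pod" -c pxc-client -- \
        bash -c "printf '%s\n' \"$command\" | mysql -sN $uri"
}

compare_mysql_cmd() {
    local command_id="$1" command="$2" uri="$3"
    local expected="${test_dir}/compare/${command_id}.sql"
    [[ $IMAGE_PXC =~ 5\.7 && -f "${test_dir}/compare/${command_id}-57.sql" ]] \
        && expected="${test_dir}/compare/${command_id}-57.sql"
    run_mysql "$command" "$uri" >"${tmp_dir}/${command_id}.sql"
    diff -u "$expected" "${tmp_dir}/${command_id}.sql"
}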
-s /tmp/tmp.zNf2oNZjFh/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4.sql /tmp/tmp.zNf2oNZjFh/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.fkVrJ27QFR ++ mktemp + local LAST_ERR=/tmp/tmp.3GrXcZPGau + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.fkVrJ27QFR secret/my-cluster-secrets patched + cat /tmp/tmp.3GrXcZPGau + rm /tmp/tmp.fkVrJ27QFR /tmp/tmp.3GrXcZPGau + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3w1gh0XQ1G +++ mktemp ++ local LAST_ERR=/tmp/tmp.vLfm8H4t8T ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3w1gh0XQ1G ++ cat /tmp/tmp.vLfm8H4t8T ++ rm /tmp/tmp.3w1gh0XQ1G /tmp/tmp.vLfm8H4t8T ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JyRJilOSYt +++ mktemp ++ local LAST_ERR=/tmp/tmp.6delzDIRuu ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JyRJilOSYt ++ cat /tmp/tmp.6delzDIRuu ++ rm /tmp/tmp.JyRJilOSYt /tmp/tmp.6delzDIRuu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5YHhr1hS4u +++ mktemp ++ local LAST_ERR=/tmp/tmp.wT7wTs4yRV ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5YHhr1hS4u ++ cat /tmp/tmp.wT7wTs4yRV ++ rm /tmp/tmp.5YHhr1hS4u /tmp/tmp.wT7wTs4yRV ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MRUK8dAp9u +++ mktemp ++ local LAST_ERR=/tmp/tmp.yrCGPYMeND ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MRUK8dAp9u ++ cat /tmp/tmp.yrCGPYMeND ++ rm /tmp/tmp.MRUK8dAp9u /tmp/tmp.yrCGPYMeND ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.nbsRdkQKY4 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.QB1TB0EheW +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.nbsRdkQKY4 +++++ cat /tmp/tmp.QB1TB0EheW +++++ rm /tmp/tmp.nbsRdkQKY4 /tmp/tmp.QB1TB0EheW +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.WHEfrusSxo ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.flSJdtjzd8 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.WHEfrusSxo +++++ cat /tmp/tmp.flSJdtjzd8 +++++ rm /tmp/tmp.WHEfrusSxo /tmp/tmp.flSJdtjzd8 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.G38OWwdT1B +++ mktemp ++ local LAST_ERR=/tmp/tmp.XgJKP2O5h9 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.G38OWwdT1B ++ cat /tmp/tmp.XgJKP2O5h9 ++ rm /tmp/tmp.G38OWwdT1B /tmp/tmp.XgJKP2O5h9 ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h 
some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DM1CU7CSp3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.lhsp3YQsfL ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DM1CU7CSp3 ++ cat /tmp/tmp.lhsp3YQsfL ++ rm /tmp/tmp.DM1CU7CSp3 /tmp/tmp.lhsp3YQsfL ++ return 0 + client_pod=pxc-client-857d976497-khhbf + wait_pod pxc-client-857d976497-khhbf + local pod=pxc-client-857d976497-khhbf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-khhbf ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-857d976497-khhbf condition met waiting for pod/pxc-client-857d976497-khhbf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.zNf2oNZjFh/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4.sql /tmp/tmp.zNf2oNZjFh/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.U7uX1eCc94 ++ mktemp + local LAST_ERR=/tmp/tmp.ZFPahfJSVK + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.U7uX1eCc94 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.ZFPahfJSVK + rm /tmp/tmp.U7uX1eCc94 /tmp/tmp.ZFPahfJSVK + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Dn27KMzl5i +++ mktemp ++ local LAST_ERR=/tmp/tmp.bKmHLbPlhe ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Dn27KMzl5i ++ cat /tmp/tmp.bKmHLbPlhe ++ rm /tmp/tmp.Dn27KMzl5i /tmp/tmp.bKmHLbPlhe ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
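The "change secret name" step points the custom resource at a different Secret via a merge patch; the operator then reconciles the whole cluster against the new credentials, which is why .status.state drops back to "initializing" before the wait loop below sees "ready" again. A minimal sketch, assuming the same cluster and Secret names as in the trace:

# Switch the PXC cluster to read credentials from my-cluster-secrets-2.
kubectl patch pxc some-name --type merge \
    --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}'

# The operator picks the change up asynchronously; the status cycles
# through "initializing" until every pod has been reconciled.
kubectl get pxc some-name -o 'jsonpath={.status.state}'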
.+ sleep 5 [ ...identical 5-second polls of kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' repeat for iterations 0 through 17, each returning "initializing"; the per-iteration mktemp/cat/rm bookkeeping is elided here... ] + echo -n . 
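The polling condensed above is wait_cluster_consistency, which is essentially a bounded retry loop over .status.state. A stripped-down sketch of the logic visible in the trace (300 iterations at 5 s each, i.e. roughly a 25-minute ceiling; the real helper additionally compares the ready replica counts afterwards):

# Poll until the cluster reports "ready" or the iteration cap is hit.
wait_ready() {
    local cluster=$1 i=0 max=300
    until [[ "$(kubectl get pxc "$cluster" -o 'jsonpath={.status.state}')" == "ready" ]]; do
        [[ $i -ge $max ]] && { echo "timeout waiting for pxc/$cluster" >&2; return 1; }
        echo -n .
        sleep 5
        let i+=1
    done
}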
.+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8fb9YcXzuO +++ mktemp ++ local LAST_ERR=/tmp/tmp.uOgq3gk82X ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8fb9YcXzuO ++ cat /tmp/tmp.uOgq3gk82X ++ rm /tmp/tmp.8fb9YcXzuO /tmp/tmp.uOgq3gk82X ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1jaFOYkFZs +++ mktemp ++ local LAST_ERR=/tmp/tmp.NCgYwmnRqS ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1jaFOYkFZs ++ cat /tmp/tmp.NCgYwmnRqS ++ rm /tmp/tmp.1jaFOYkFZs /tmp/tmp.NCgYwmnRqS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OHadOCTWjr +++ mktemp ++ local LAST_ERR=/tmp/tmp.pDpH3adFnb ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OHadOCTWjr ++ cat /tmp/tmp.pDpH3adFnb ++ rm /tmp/tmp.OHadOCTWjr /tmp/tmp.pDpH3adFnb ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.m3Mrq4fzfA +++ mktemp ++ local LAST_ERR=/tmp/tmp.PJVyIZ8b9o ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.m3Mrq4fzfA ++ cat /tmp/tmp.PJVyIZ8b9o ++ rm /tmp/tmp.m3Mrq4fzfA /tmp/tmp.PJVyIZ8b9o ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.qBhxULHR6k ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.5lFiBVna71 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.qBhxULHR6k +++++ cat /tmp/tmp.5lFiBVna71 +++++ rm /tmp/tmp.qBhxULHR6k /tmp/tmp.5lFiBVna71 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.FPA4NtYdq4 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Yl4Abvvv8K +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.FPA4NtYdq4 +++++ cat /tmp/tmp.Yl4Abvvv8K +++++ rm /tmp/tmp.FPA4NtYdq4 /tmp/tmp.Yl4Abvvv8K +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8wzfETKoZ6 +++ 
mktemp ++ local LAST_ERR=/tmp/tmp.vaI4p6wMdA ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8wzfETKoZ6 ++ cat /tmp/tmp.vaI4p6wMdA ++ rm /tmp/tmp.8wzfETKoZ6 /tmp/tmp.vaI4p6wMdA ++ return 0 + [[ 2 == \2 ]] + echo + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.JASqPRyQWd ++ mktemp + local LAST_ERR=/tmp/tmp.lLeifNsBGe + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.JASqPRyQWd secret/my-cluster-secrets-2 patched + cat /tmp/tmp.lLeifNsBGe + rm /tmp/tmp.JASqPRyQWd /tmp/tmp.lLeifNsBGe + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YRqhlA8v7c +++ mktemp ++ local LAST_ERR=/tmp/tmp.L64VcFtmxX ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YRqhlA8v7c ++ cat /tmp/tmp.L64VcFtmxX ++ rm /tmp/tmp.YRqhlA8v7c /tmp/tmp.L64VcFtmxX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rVM5nEnpip +++ mktemp ++ local LAST_ERR=/tmp/tmp.rsIeFjtIG4 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rVM5nEnpip ++ cat /tmp/tmp.rsIeFjtIG4 ++ rm /tmp/tmp.rVM5nEnpip /tmp/tmp.rsIeFjtIG4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6HrfsIwiTk +++ mktemp ++ local LAST_ERR=/tmp/tmp.TjCIST2jGV ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6HrfsIwiTk ++ cat /tmp/tmp.TjCIST2jGV ++ rm /tmp/tmp.6HrfsIwiTk /tmp/tmp.TjCIST2jGV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
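The new password is encoded inline with echo -n piped to base64; the -n matters, because a trailing newline would silently become part of the stored credential. Reproducing the two values from the trace:

echo -n test-password2 | base64             # -> dGVzdC1wYXNzd29yZDI=
echo dGVzdC1wYXNzd29yZDI= | base64 --decode # -> test-password2 (no newline)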
.+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VEnDhvZOce +++ mktemp ++ local LAST_ERR=/tmp/tmp.TO8BzpIrDW ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VEnDhvZOce ++ cat /tmp/tmp.TO8BzpIrDW ++ rm /tmp/tmp.VEnDhvZOce /tmp/tmp.TO8BzpIrDW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.r3eW2ZXnnZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.Sqc7PxB9yR ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.r3eW2ZXnnZ ++ cat /tmp/tmp.Sqc7PxB9yR ++ rm /tmp/tmp.r3eW2ZXnnZ /tmp/tmp.Sqc7PxB9yR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.H873ttCELR +++ mktemp ++ local LAST_ERR=/tmp/tmp.KvUvY6yFcz ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.H873ttCELR ++ cat /tmp/tmp.KvUvY6yFcz ++ rm /tmp/tmp.H873ttCELR /tmp/tmp.KvUvY6yFcz ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0RFdOzyU7D +++ mktemp ++ local LAST_ERR=/tmp/tmp.0KZbwYHh7s ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0RFdOzyU7D ++ cat /tmp/tmp.0KZbwYHh7s ++ rm /tmp/tmp.0RFdOzyU7D /tmp/tmp.0KZbwYHh7s ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.CBSRUK85jQ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.77SxnKw6L4 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.CBSRUK85jQ +++++ cat /tmp/tmp.77SxnKw6L4 +++++ rm /tmp/tmp.CBSRUK85jQ /tmp/tmp.77SxnKw6L4 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ou57Y2VT30 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.pfxFmE8AEt +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ou57Y2VT30 +++++ cat /tmp/tmp.pfxFmE8AEt +++++ rm /tmp/tmp.ou57Y2VT30 /tmp/tmp.pfxFmE8AEt +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XX87qOHAxG +++ 
mktemp ++ local LAST_ERR=/tmp/tmp.hlyzxcdvoz ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XX87qOHAxG ++ cat /tmp/tmp.hlyzxcdvoz ++ rm /tmp/tmp.XX87qOHAxG /tmp/tmp.hlyzxcdvoz ++ return 0 + [[ 2 == \2 ]] + echo + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.m7hCweF1Lg +++ mktemp ++ local LAST_ERR=/tmp/tmp.COQh9A4Nho ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.m7hCweF1Lg ++ cat /tmp/tmp.COQh9A4Nho ++ rm /tmp/tmp.m7hCweF1Lg /tmp/tmp.COQh9A4Nho ++ return 0 + client_pod=pxc-client-857d976497-khhbf + wait_pod pxc-client-857d976497-khhbf + local pod=pxc-client-857d976497-khhbf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-khhbf ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-857d976497-khhbf condition met waiting for pod/pxc-client-857d976497-khhbf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.zNf2oNZjFh/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4.sql /tmp/tmp.zNf2oNZjFh/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.wWeLzT79Lc +++ mktemp ++ local LAST_ERR=/tmp/tmp.kwby56GOKz ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wWeLzT79Lc ++ cat /tmp/tmp.kwby56GOKz ++ rm /tmp/tmp.wWeLzT79Lc /tmp/tmp.kwby56GOKz ++ return 0 + newpass='N0XOHd1>v,i8)t.*' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''N0XOHd1>v,i8)t.*'\'';' '-h some-name-pxc -uroot -p'\''N0XOHd1>v,i8)t.*'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''N0XOHd1>v,i8)t.*'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''N0XOHd1>v,i8)t.*'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.I3L4FN2gid +++ mktemp ++ local LAST_ERR=/tmp/tmp.c5AAyWdedL ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.I3L4FN2gid ++ cat /tmp/tmp.c5AAyWdedL ++ rm /tmp/tmp.I3L4FN2gid /tmp/tmp.c5AAyWdedL ++ return 0 + client_pod=pxc-client-857d976497-khhbf + wait_pod pxc-client-857d976497-khhbf + local pod=pxc-client-857d976497-khhbf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-khhbf ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-857d976497-khhbf condition met waiting for pod/pxc-client-857d976497-khhbf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''N0XOHd1>v,i8)t.*'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''N0XOHd1>v,i8)t.*'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''N0XOHd1>v,i8)t.*'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''N0XOHd1>v,i8)t.*'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.H6OQIkQZ0E +++ mktemp ++ local LAST_ERR=/tmp/tmp.mxHfd2Guz0 ++ local exit_status=0 +++ seq 0 2 ++ for i 
in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.H6OQIkQZ0E ++ cat /tmp/tmp.mxHfd2Guz0 ++ rm /tmp/tmp.H6OQIkQZ0E /tmp/tmp.mxHfd2Guz0 ++ return 0 + client_pod=pxc-client-857d976497-khhbf + wait_pod pxc-client-857d976497-khhbf + local pod=pxc-client-857d976497-khhbf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-khhbf ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-857d976497-khhbf condition met waiting for pod/pxc-client-857d976497-khhbf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.zNf2oNZjFh/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4.sql /tmp/tmp.zNf2oNZjFh/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.wjcNdOdNC4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.gANFYAoW6c ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wjcNdOdNC4 ++ cat /tmp/tmp.gANFYAoW6c ++ rm /tmp/tmp.wjcNdOdNC4 /tmp/tmp.gANFYAoW6c ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.kb37dIkyKv ++ mktemp + local LAST_ERR=/tmp/tmp.ZlAx9Fb4IC + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.kb37dIkyKv secret/my-cluster-secrets-2 configured + cat /tmp/tmp.ZlAx9Fb4IC Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
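The warning emitted here is standard kubectl behaviour: my-cluster-secrets-2 was first created without the kubectl.kubernetes.io/last-applied-configuration annotation, so the first kubectl apply has no baseline to diff against and patches the annotation in itself. If the noise matters, creating the object with --save-config avoids it; a sketch:

# Record the last-applied annotation up front, so later 'kubectl apply'
# calls have a baseline to diff against and stay warning-free.
kubectl create --save-config -f e2e-tests/users/conf/secrets.yml
kubectl apply -f e2e-tests/users/conf/secrets.yml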
+ rm /tmp/tmp.kb37dIkyKv /tmp/tmp.ZlAx9Fb4IC + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WWWiGy1cqe +++ mktemp ++ local LAST_ERR=/tmp/tmp.acMjsjfSnf ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WWWiGy1cqe ++ cat /tmp/tmp.acMjsjfSnf ++ rm /tmp/tmp.WWWiGy1cqe /tmp/tmp.acMjsjfSnf ++ return 0 + client_pod=pxc-client-857d976497-khhbf + wait_pod pxc-client-857d976497-khhbf + local pod=pxc-client-857d976497-khhbf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-khhbf ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-857d976497-khhbf condition met waiting for pod/pxc-client-857d976497-khhbf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.zNf2oNZjFh/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-4.sql /tmp/tmp.zNf2oNZjFh/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/conf/some-name.yml + /usr/sbin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/sbin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ mktemp + /usr/sbin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/sbin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2203-07b4356f#' + /usr/sbin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/sbin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/sbin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + local LAST_OUT=/tmp/tmp.ovV3hAIwhm + /usr/sbin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/sbin/sed -e s~minio-service.#namespace~minio-service.users-28439~ + /usr/sbin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' ++ mktemp + /usr/sbin/sed -e 's#apply:.*#apply: Never#' + local LAST_ERR=/tmp/tmp.8VRiGrjA1O + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ovV3hAIwhm perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.8VRiGrjA1O + rm /tmp/tmp.ovV3hAIwhm /tmp/tmp.8VRiGrjA1O + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8Cezl8g09z +++ mktemp ++ local LAST_ERR=/tmp/tmp.7Ha2m9ZWih ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8Cezl8g09z ++ cat /tmp/tmp.7Ha2m9ZWih ++ rm /tmp/tmp.8Cezl8g09z /tmp/tmp.7Ha2m9ZWih ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
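apply_config does not apply the YAML verbatim: cat_config streams it through a chain of sed -e substitutions that pin the apiVersion and rewrite every image: line to the images under test before piping the result into kubectl apply -f -. A shortened sketch of the pattern (only two of the substitutions shown; the trace above rewrites init, pmm, backup, haproxy, proxysql and logcollector images the same way):

# Rewrite the manifest on the fly, then apply the result.
cat e2e-tests/users/conf/some-name.yml \
    | sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
    | sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' \
    | kubectl apply -f -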
.+ sleep 5 [ ...identical 5-second polls of kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' repeat for iterations 0 through 22, each returning "initializing"; the per-iteration mktemp/cat/rm bookkeeping is elided here... ] + echo -n . 
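This wait is one of the longest in the run: the apply above changed the cluster spec itself (including switching the proxy from proxysql to haproxy, as the next checks show), so the operator rolls the statefulsets pod by pod. Outside the harness the same progress can be followed directly on the statefulsets; a sketch, assuming the default object names and a RollingUpdate update strategy:

# Watch the rolling update the operator performs after the spec change
# (only meaningful when the statefulset uses the RollingUpdate strategy).
kubectl rollout status statefulset/some-name-pxc
kubectl rollout status statefulset/some-name-haproxy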
.+ sleep 5 + [[ 23 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ksA9mu9oS5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.oDarnaAf5p ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ksA9mu9oS5 ++ cat /tmp/tmp.oDarnaAf5p ++ rm /tmp/tmp.ksA9mu9oS5 /tmp/tmp.oDarnaAf5p ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GdioBlR1Qo +++ mktemp ++ local LAST_ERR=/tmp/tmp.uK0fM6ii7c ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GdioBlR1Qo ++ cat /tmp/tmp.uK0fM6ii7c ++ rm /tmp/tmp.GdioBlR1Qo /tmp/tmp.uK0fM6ii7c ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.zYQ85YBdtC ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Ctwon42t26 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.zYQ85YBdtC +++++ cat /tmp/tmp.Ctwon42t26 +++++ rm /tmp/tmp.zYQ85YBdtC /tmp/tmp.Ctwon42t26 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2s1tJGATnE +++ mktemp ++ local LAST_ERR=/tmp/tmp.3QWoFkMUox ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2s1tJGATnE ++ cat /tmp/tmp.3QWoFkMUox ++ rm /tmp/tmp.2s1tJGATnE /tmp/tmp.3QWoFkMUox ++ return 0 + [[ 3 == \3 ]] + echo + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + check_generation 2 haproxy some-name + local generation=2 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cDMmqwGNsk +++ mktemp ++ local LAST_ERR=/tmp/tmp.PXg0WBAEbM ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cDMmqwGNsk ++ cat /tmp/tmp.PXg0WBAEbM ++ rm /tmp/tmp.cDMmqwGNsk /tmp/tmp.PXg0WBAEbM ++ return 0 + current_generation=2 + [[ 2 != \2 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.yfbwpLv6mL ++ mktemp + local LAST_ERR=/tmp/tmp.Mq0XXeGwxM + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + 
'[' 0 '!=' 0 ']' + break + cat /tmp/tmp.yfbwpLv6mL secret/my-cluster-secrets patched + cat /tmp/tmp.Mq0XXeGwxM + rm /tmp/tmp.yfbwpLv6mL /tmp/tmp.Mq0XXeGwxM + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.G2lLtlGif6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.F3nGH08KYe ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.G2lLtlGif6 ++ cat /tmp/tmp.F3nGH08KYe ++ rm /tmp/tmp.G2lLtlGif6 /tmp/tmp.F3nGH08KYe ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5vXW2uibj7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.HCriMTeiRn ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5vXW2uibj7 ++ cat /tmp/tmp.HCriMTeiRn ++ rm /tmp/tmp.5vXW2uibj7 /tmp/tmp.HCriMTeiRn ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7jYjjSuOEa +++ mktemp ++ local LAST_ERR=/tmp/tmp.WMV8V4OYv1 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7jYjjSuOEa ++ cat /tmp/tmp.WMV8V4OYv1 ++ rm /tmp/tmp.7jYjjSuOEa /tmp/tmp.WMV8V4OYv1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UDsoFf7S7E +++ mktemp ++ local LAST_ERR=/tmp/tmp.EUn1LxAao4 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UDsoFf7S7E ++ cat /tmp/tmp.EUn1LxAao4 ++ rm /tmp/tmp.UDsoFf7S7E /tmp/tmp.EUn1LxAao4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hPbuVa3hTr +++ mktemp ++ local LAST_ERR=/tmp/tmp.m95gDjx7Li ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hPbuVa3hTr ++ cat /tmp/tmp.m95gDjx7Li ++ rm /tmp/tmp.hPbuVa3hTr /tmp/tmp.m95gDjx7Li ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
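The check_generation step above (between the spec change and the monitor patch) asserts that the haproxy statefulset's .metadata.generation is exactly 2, i.e. the statefulset was rewritten once and only once by the apply; a password-only Secret patch like the monitor change is not expected to bump it further. A minimal sketch of the check (signature simplified relative to the suite's helper, which takes generation, container and cluster separately):

# Fail if the statefulset has been regenerated more (or fewer) times than expected.
check_generation() {
    local expected=$1 sts=$2
    local current
    current=$(kubectl get statefulset "$sts" -o 'jsonpath={.metadata.generation}')
    [[ "$current" == "$expected" ]] || { echo "generation $current != $expected" >&2; return 1; }
}

check_generation 2 some-name-haproxy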
.+ sleep 5 [ ...identical 5-second polls of kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' repeat for iterations 4 through 7, each returning "initializing"; the per-iteration mktemp/cat/rm bookkeeping is elided here... ] + echo -n . 
.+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.r1qAZ0N9Hy +++ mktemp ++ local LAST_ERR=/tmp/tmp.5nB5v6Y4Ds ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.r1qAZ0N9Hy ++ cat /tmp/tmp.5nB5v6Y4Ds ++ rm /tmp/tmp.r1qAZ0N9Hy /tmp/tmp.5nB5v6Y4Ds ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.blscq30fJZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.tEurovSc7c ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.blscq30fJZ ++ cat /tmp/tmp.tEurovSc7c ++ rm /tmp/tmp.blscq30fJZ /tmp/tmp.tEurovSc7c ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.h1CWwEPjCs ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.EfbdDn84fC +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in $(seq 0 2) +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.h1CWwEPjCs +++++ cat /tmp/tmp.EfbdDn84fC +++++ rm /tmp/tmp.h1CWwEPjCs /tmp/tmp.EfbdDn84fC +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.o2AkmybFVJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.4jik7ZinOq ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.o2AkmybFVJ ++ cat /tmp/tmp.4jik7ZinOq ++ rm /tmp/tmp.o2AkmybFVJ /tmp/tmp.4jik7ZinOq ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-3-57.sql ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.N1Sika3YlO +++ mktemp ++ local LAST_ERR=/tmp/tmp.CpTTvVIOC4 ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' 
++ break ++ cat /tmp/tmp.N1Sika3YlO ++ cat /tmp/tmp.CpTTvVIOC4 ++ rm /tmp/tmp.N1Sika3YlO /tmp/tmp.CpTTvVIOC4 ++ return 0 + client_pod=pxc-client-857d976497-khhbf + wait_pod pxc-client-857d976497-khhbf + local pod=pxc-client-857d976497-khhbf + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-khhbf ++ /usr/sbin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' egrep: warning: egrep is obsolescent; using grep -E + local container= + set +o xtrace pod/pxc-client-857d976497-khhbf condition met waiting for pod/pxc-client-857d976497-khhbf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.zNf2oNZjFh/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2203/e2e-tests/users/compare/select-3.sql /tmp/tmp.zNf2oNZjFh/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + check_generation 3 haproxy some-name + local generation=3 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0KefHgUoAC +++ mktemp ++ local LAST_ERR=/tmp/tmp.fbpGuEvz6r ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0KefHgUoAC ++ cat /tmp/tmp.fbpGuEvz6r ++ rm /tmp/tmp.0KefHgUoAC /tmp/tmp.fbpGuEvz6r ++ return 0 + current_generation=3 + [[ 3 != \3 ]] + destroy users-28439 + local namespace=users-28439 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + grep -v 'get backup status: Job.batch' + /usr/sbin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + sort -u + tee /tmp/tmp.zNf2oNZjFh/operator.log +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.4ZbBttrJzn +++ mktemp ++ local LAST_ERR=/tmp/tmp.ce5FAFMfqX ++ local exit_status=0 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4ZbBttrJzn ++ cat /tmp/tmp.ce5FAFMfqX ++ rm /tmp/tmp.4ZbBttrJzn /tmp/tmp.ce5FAFMfqX ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-7db859f455-2rm95 ++ mktemp + local LAST_OUT=/tmp/tmp.rwaNrQrzb8 ++ mktemp + local LAST_ERR=/tmp/tmp.8nuYzEWCls + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-7db859f455-2rm95 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rwaNrQrzb8 + cat /tmp/tmp.8nuYzEWCls + rm /tmp/tmp.rwaNrQrzb8 /tmp/tmp.8nuYzEWCls + 
return 0 + [unreadable object-diff fragment from the sanitized operator log omitted; only "... // 16 identical fields" markers survived] 2025-10-02T03:16:04.415Z INFO setup Manager starting up {"gitCommit": "07b4356f03c8f0e94567843f1a6a2b3074b43604", "gitBranch": "PR-2203-07b4356f", "buildTime": "2025-10-02T01:13:21Z", "goVersion": "go1.24.7", "os": "linux", "arch": "amd64"} 2025-10-02T03:16:04.415Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.12-gke.1265000"} 2025-10-02T03:16:04.418Z INFO setup Registering Components. 2025-10-02T03:16:06.203Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-10-02T03:16:06.203Z INFO controller-runtime.metrics Starting metrics server 2025-10-02T03:16:06.203Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2025-10-02T03:16:06.203Z INFO controller-runtime.webhook Starting webhook server 2025-10-02T03:16:06.203Z INFO setup Starting the Cmd. 2025-10-02T03:16:06.203Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-10-02T03:16:06.204Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2025-10-02T03:16:06.204Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-10-02T03:16:06.204Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-10-02T03:16:06.304Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com...
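
The mktemp / seq 0 2 / cat / rm pattern that dominates the shell trace above is the harness's kubectl_bin retry wrapper. A minimal sketch reconstructed from the xtrace output; the back-off between failed attempts is an assumption, and the real helper in the e2e suite may differ in detail:

    # run kubectl up to three times, buffering stdout/stderr in temp files
    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" != 0 ]; then
                sleep 1        # assumed back-off; not visible in the trace
            else
                break          # matches the immediate break on exit_status=0 above
            fi
        done
        cat "$LAST_OUT"        # replay buffered stdout
        cat "$LAST_ERR" >&2    # replay buffered stderr
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }
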
2025-10-02T03:16:06.340Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2025-10-02T03:16:06.341Z DEBUG events percona-xtradb-cluster-operator-7db859f455-2rm95_1f634a86-ab3f-4636-b1b0-00b745f096f4 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"81244d41-d3d4-4ad8-b462-61ff74a98311","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1759374966332879009"}, "reason": "LeaderElection"} 2025-10-02T03:16:06.341Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-10-02T03:16:06.341Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-10-02T03:16:06.341Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.Secret"} 2025-10-02T03:16:06.341Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-10-02T03:16:06.442Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2025-10-02T03:16:06.442Z INFO Starting Controller {"controller": "pxc-controller"} 2025-10-02T03:16:06.442Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2025-10-02T03:16:06.442Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2025-10-02T03:16:06.442Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2025-10-02T03:16:06.442Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2025-10-02T03:16:45.898Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "bdc9f939-2f66-4789-ae1e-ba2ba5f4208a", "version": "1.19.0"} 2025-10-02T03:16:46.140Z INFO User secrets updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "bdc9f939-2f66-4789-ae1e-ba2ba5f4208a", "secrets": "my-cluster-secrets"} 2025-10-02T03:16:46.356Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "bdc9f939-2f66-4789-ae1e-ba2ba5f4208a", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-10-02T03:16:46.386Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "bdc9f939-2f66-4789-ae1e-ba2ba5f4208a", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-10-02T03:16:46.969Z ERROR Reconciler error {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "bdc9f939-2f66-4789-ae1e-ba2ba5f4208a", "error": "failed to deploy proxysql: updatePod for proxysql: reconcile config: reconcile autotune config: create or update configmap: configmaps \"auto-some-name-pxc\" already exists", "errorVerbose": "configmaps \"auto-some-name-pxc\" already exists\ncreate or update 
configmap\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileAutotuneConfigMap\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:92\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:25\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nreconcile autotune 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileConfigMaps\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/config.go:27\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:52\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nreconcile 
config\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updatePod\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/upgrade.go:54\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:577\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nupdatePod for proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:578\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:595\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700\nfailed to deploy 
proxysql\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).deploy\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:596\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:379\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:216\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:461\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:421\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:296\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-02T03:16:47.098Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "7292dc4b-5f81-48d4-8b63-a527cd19681c", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-02T03:16:47.139Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "7292dc4b-5f81-48d4-8b63-a527cd19681c", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-02T03:16:47.222Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "7292dc4b-5f81-48d4-8b63-a527cd19681c", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-02T03:16:47.261Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "7292dc4b-5f81-48d4-8b63-a527cd19681c", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-02T03:16:47.336Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "7292dc4b-5f81-48d4-8b63-a527cd19681c", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-02T03:16:47.447Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "7292dc4b-5f81-48d4-8b63-a527cd19681c", "object": "some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-02T03:16:48.349Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "d967c5c0-b54e-42d2-bd09-3253c051a407", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-10-02T03:16:48.384Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "d967c5c0-b54e-42d2-bd09-3253c051a407", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-10-02T03:18:05.362Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-28439", "name": 
"some-name", "reconcileID": "77b37f9c-7070-4e00-91dd-8da49739db5e", "user": "operator"} 2025-10-02T03:18:05.395Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "77b37f9c-7070-4e00-91dd-8da49739db5e", "user": "monitor"} 2025-10-02T03:18:05.428Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "77b37f9c-7070-4e00-91dd-8da49739db5e"} 2025-10-02T03:18:05.466Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "77b37f9c-7070-4e00-91dd-8da49739db5e", "user": "xtrabackup"} 2025-10-02T03:18:05.501Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "77b37f9c-7070-4e00-91dd-8da49739db5e"} 2025-10-02T03:18:05.510Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "77b37f9c-7070-4e00-91dd-8da49739db5e", "err": "get primary pxc pod: not found"} 2025-10-02T03:18:10.311Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "e897005f-42ae-4344-b3c4-276315d549f1", "err": "get primary pxc pod: not found"} 2025-10-02T03:18:15.462Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "fbb97271-b093-4bab-ad8f-829701f3ee2a", "err": "get primary pxc pod: not found"} 2025-10-02T03:18:20.622Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "3afe96a4-9d18-42b8-895f-ef7f2bdf7735", "err": "get primary pxc pod: not found"} 2025-10-02T03:20:31.976Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "714e2878-6a64-4190-858d-eb3aefd4456f", "user": "root"} 2025-10-02T03:20:32.017Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "714e2878-6a64-4190-858d-eb3aefd4456f", "user": "replication"} 2025-10-02T03:20:32.074Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "714e2878-6a64-4190-858d-eb3aefd4456f", "new version": "5.7.44-48-57"} 2025-10-02T03:20:34.042Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "714e2878-6a64-4190-858d-eb3aefd4456f"} 2025-10-02T03:20:38.503Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "ff7d6516-03aa-4ba8-92c9-f069cbda6450"} 2025-10-02T03:20:43.633Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "e376ed67-b68b-4f9f-87b1-97746305219f"} 2025-10-02T03:20:48.856Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "a40d3129-5ca5-4574-839a-0f9cb150de72"} 2025-10-02T03:20:54.419Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "45e44396-993c-4b30-a20b-f9b46b2fc9fc"} 
2025-10-02T03:20:59.621Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "23c38258-209b-49fe-b0a0-352ebd6426de"} 2025-10-02T03:21:04.839Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "7f6397a1-73ff-4c3a-a4f5-9c4bc1f0dff0"} 2025-10-02T03:21:10.539Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "0136c44e-7fa5-4f7c-bf3e-99f322524f42"} 2025-10-02T03:21:15.872Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "4bfb0d0a-1fb5-4e55-b4b3-c3946f1a450f"} 2025-10-02T03:21:21.154Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "eaf5120f-dcd2-43e6-8d6f-f2d5d8d5a2ce"} 2025-10-02T03:21:26.365Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "229cf5a4-03dd-457e-916a-7748250c92b0"} 2025-10-02T03:21:31.763Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "b7d2d34f-6cd1-4297-831c-fa2620acc6b7"} 2025-10-02T03:21:36.920Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "2e1d2aae-d740-4e7e-804b-6922578fe2e7"} 2025-10-02T03:21:42.137Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "f2788ee8-9a1c-42bb-8b96-ecfc03af133f"} 2025-10-02T03:21:47.230Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "d5c8ee97-7ad3-4e68-b4f8-823ef618dbc5"} 2025-10-02T03:21:52.645Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "4d222ec1-8959-4570-a313-2c62d0621688"} 2025-10-02T03:21:57.873Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "9fb077b8-75af-451c-b54f-01274c150d48"} 2025-10-02T03:22:03.143Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "4d9b4939-a3fc-41b3-89f4-ff99c39f66b1"} 2025-10-02T03:22:08.562Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "69bccb5a-27eb-4ba1-8fa4-2e48c7105c2d"} 2025-10-02T03:22:12.475Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "eda8f11d-62e0-48a3-a357-cdd96b4fa3dd", "user": "root"} 2025-10-02T03:22:12.496Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "eda8f11d-62e0-48a3-a357-cdd96b4fa3dd", "user": "root"} 2025-10-02T03:22:12.512Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "eda8f11d-62e0-48a3-a357-cdd96b4fa3dd", "secret": "some-name-mysql-init", "user": "root"} 2025-10-02T03:22:15.252Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", 
"namespace": "users-28439", "name": "some-name", "reconcileID": "4790c40b-f56a-4fa5-8536-a9d2c9beefdf"} 2025-10-02T03:22:15.882Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "eda8f11d-62e0-48a3-a357-cdd96b4fa3dd"} 2025-10-02T03:22:15.905Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "eda8f11d-62e0-48a3-a357-cdd96b4fa3dd", "user": "root"} 2025-10-02T03:22:17.434Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "eda8f11d-62e0-48a3-a357-cdd96b4fa3dd"} 2025-10-02T03:22:24.022Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "41935738-adcc-4c2b-ba50-5b7c8756a8ef"} 2025-10-02T03:22:29.454Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "763cff64-f55a-41b1-9c2b-806246aff251"} 2025-10-02T03:22:32.283Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "a8f79a22-b770-4cdf-a985-50964ac80460", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-02T03:22:32.335Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "a8f79a22-b770-4cdf-a985-50964ac80460", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-02T03:22:35.027Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "a8f79a22-b770-4cdf-a985-50964ac80460", "error": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-02T03:22:59.167Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "2d4f3606-3345-4f43-aa84-b294f9170213", "user": "proxyadmin"} 2025-10-02T03:22:59.167Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "2d4f3606-3345-4f43-aa84-b294f9170213", "user": "proxyadmin"} 2025-10-02T03:22:59.212Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "2d4f3606-3345-4f43-aa84-b294f9170213", "user": "proxyadmin"} 2025-10-02T03:22:59.233Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "2d4f3606-3345-4f43-aa84-b294f9170213", "user": 
"proxyadmin"} 2025-10-02T03:22:59.233Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "2d4f3606-3345-4f43-aa84-b294f9170213", "last-applied-secret": "dd79cd08d55481e66b3df9f2b1060fc5062722e9eef6f83f40c4772f8c9a945a"} 2025-10-02T03:22:59.237Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "2d4f3606-3345-4f43-aa84-b294f9170213", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-02T03:22:59.479Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "0bec0145-c6ad-4523-802d-1ca9c263df10", "error": "exec syncusers: failed to execute command in pod: Internal error occurred: error executing command in container: failed to exec in container: failed to load task: no running task found: task a15e987e1b294c7bff731af72610c58897910557b27867844e9648c7dea0b616 not found: not found / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: Internal error occurred: error executing command in container: failed to exec in container: failed to load task: no running task found: task a15e987e1b294c7bff731af72610c58897910557b27867844e9648c7dea0b616 not found: not found / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-02T03:23:47.775Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "6fe5127a-1334-461f-b7b5-73f8047720a3"} 2025-10-02T03:23:53.070Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "9bc5cd1e-6f88-4f52-b305-248658e9ea90"} 2025-10-02T03:23:53.750Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "bfec3f4c-b5ec-403f-946c-e6a77300c6af", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-02T03:23:53.819Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": 
"bfec3f4c-b5ec-403f-946c-e6a77300c6af", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-02T03:23:55.387Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "bfec3f4c-b5ec-403f-946c-e6a77300c6af"} 2025-10-02T03:24:00.624Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "b8c24985-0773-48cd-84e9-294ede7d464d", "user": "xtrabackup"} 2025-10-02T03:24:00.637Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "b8c24985-0773-48cd-84e9-294ede7d464d", "user": "xtrabackup"} 2025-10-02T03:24:00.658Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "b8c24985-0773-48cd-84e9-294ede7d464d", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-10-02T03:24:00.673Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "b8c24985-0773-48cd-84e9-294ede7d464d", "user": "xtrabackup"} 2025-10-02T03:24:00.673Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "b8c24985-0773-48cd-84e9-294ede7d464d", "last-applied-secret": "c93c64c276d4323c2ab792ca37c8cc4435032a4f0003313ec9b8419436a4ac0e"} 2025-10-02T03:24:00.676Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "b8c24985-0773-48cd-84e9-294ede7d464d", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-02T03:24:03.496Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "b8c24985-0773-48cd-84e9-294ede7d464d"} 2025-10-02T03:25:34.869Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "99c249d1-c448-449a-bcd7-56c791dd8a5c", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-28439 on 34.118.224.10:53: no such host"} 2025-10-02T03:25:39.763Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "432868b9-7ab1-4481-9374-3949d21cfc6c", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-28439 on 34.118.224.10:53: no such host"} 2025-10-02T03:25:45.456Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "ecea3c05-23ad-4af5-acac-f7818b3bd7a1", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-28439 on 34.118.224.10:53: no such host"} 2025-10-02T03:25:50.625Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "a392d4b6-f783-46b7-bf5d-aac602408980", "primary name": "some-name-pxc-0.some-name-pxc.users-28439.svc.cluster.local"} 2025-10-02T03:25:55.777Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "e2ffffbb-aea6-4cb3-b77f-dbf9cd32d471", "primary name": "some-name-pxc-0.some-name-pxc.users-28439.svc.cluster.local"} 2025-10-02T03:26:00.948Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "571bfbf9-d364-48bb-b061-85cb0efe98a1", "primary name": "some-name-pxc-0.some-name-pxc.users-28439.svc.cluster.local"} 2025-10-02T03:26:06.119Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "ba356f12-9f8a-4ffb-9cc3-652e4a313a2a", "primary name": "some-name-pxc-0.some-name-pxc.users-28439.svc.cluster.local"} 2025-10-02T03:26:11.258Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "d0c7ac58-359e-4868-b7de-aec9e839c096", "primary name": "some-name-pxc-0.some-name-pxc.users-28439.svc.cluster.local"} 2025-10-02T03:26:16.392Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "42f8006d-02f0-4a0c-83c5-5e29c3a66845", "primary name": "some-name-pxc-0.some-name-pxc.users-28439.svc.cluster.local"} 2025-10-02T03:26:21.530Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "e5d3cd02-8ca9-4190-9f77-6a701f168730", "primary name": "some-name-pxc-0.some-name-pxc.users-28439.svc.cluster.local"} 2025-10-02T03:26:29.205Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "aa6c5c55-4ce1-4a05-a950-fa09b11f4beb"} 2025-10-02T03:26:32.093Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "79ae806d-7739-4258-aac2-6d3fc881bd1a", "user": "monitor"} 2025-10-02T03:26:32.107Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "79ae806d-7739-4258-aac2-6d3fc881bd1a", "user": "monitor"} 2025-10-02T03:26:32.129Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "79ae806d-7739-4258-aac2-6d3fc881bd1a", "secret": "some-name-mysql-init", "user": "monitor"} 2025-10-02T03:26:32.162Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "79ae806d-7739-4258-aac2-6d3fc881bd1a", "user": "monitor"} 2025-10-02T03:26:32.186Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "79ae806d-7739-4258-aac2-6d3fc881bd1a", "user": "monitor"} 2025-10-02T03:26:32.186Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "79ae806d-7739-4258-aac2-6d3fc881bd1a", "last-applied-secret": "b3c2cd80a6cc9954421754bd6de3b751772abee71881fdbdf0560f43c810e500"} 2025-10-02T03:26:32.190Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28439", "name": 
"some-name", "reconcileID": "79ae806d-7739-4258-aac2-6d3fc881bd1a", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-02T03:26:34.463Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "79ae806d-7739-4258-aac2-6d3fc881bd1a", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-02T03:27:31.100Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "b433ff6e-4c1c-4bc8-be4d-507b9bc2c31c"} 2025-10-02T03:27:36.268Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "e28a8222-6135-43dd-97f2-ad719e9e93cb"} 2025-10-02T03:27:41.277Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "3cfb0c62-866f-418c-9c8e-d15ed6017a0b"} 2025-10-02T03:27:46.718Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "4ec12194-8a12-4713-a46c-d081a19c4ed4"} 2025-10-02T03:27:47.020Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "12573152-4928-4cf3-8a97-fef9c3ace672", "user": "operator"} 2025-10-02T03:27:47.044Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "12573152-4928-4cf3-8a97-fef9c3ace672", "user": "operator"} 2025-10-02T03:27:47.062Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "12573152-4928-4cf3-8a97-fef9c3ace672", "secret": "some-name-mysql-init", "user": "operator"} 2025-10-02T03:27:47.080Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "12573152-4928-4cf3-8a97-fef9c3ace672", "user": "operator"} 2025-10-02T03:27:47.080Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "12573152-4928-4cf3-8a97-fef9c3ace672", "last-applied-secret": "52a49964e357907654c2ad0095880b1de796df3cac739a92b64aa2fa033577cc"} 2025-10-02T03:27:47.083Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "12573152-4928-4cf3-8a97-fef9c3ace672", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-02T03:27:50.665Z ERROR sync users {"controller": "pxc-controller", "namespace": 
"users-28439", "name": "some-name", "reconcileID": "12573152-4928-4cf3-8a97-fef9c3ace672", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' 
(using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-02T03:28:22.587Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c2256d29-e3ac-4813-bfce-7d674009be12"} 2025-10-02T03:28:27.034Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "0a38833a-e22b-41ed-a816-230a42a70c63"} 2025-10-02T03:28:32.221Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "2502e36d-0eff-49a7-9449-d5b47c9f8491"} 2025-10-02T03:28:37.485Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "secrets": "my-cluster-secrets-2"} 2025-10-02T03:28:37.485Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "user": "root"} 2025-10-02T03:28:37.505Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "user": "root"} 2025-10-02T03:28:37.523Z INFO MySQL init secret updated {"controller": "pxc-controller", 
"namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "secret": "some-name-mysql-init", "user": "root"} 2025-10-02T03:28:37.534Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "7b1cdfff-03e6-4659-990d-20a101f8f635"} 2025-10-02T03:28:39.855Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960"} 2025-10-02T03:28:39.879Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "user": "root"} 2025-10-02T03:28:39.879Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "user": "operator"} 2025-10-02T03:28:39.895Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "user": "operator"} 2025-10-02T03:28:39.914Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "secret": "some-name-mysql-init", "user": "operator"} 2025-10-02T03:28:39.932Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "user": "operator"} 2025-10-02T03:28:39.932Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "user": "monitor"} 2025-10-02T03:28:39.948Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "user": "monitor"} 2025-10-02T03:28:39.965Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "secret": "some-name-mysql-init", "user": "monitor"} 2025-10-02T03:28:39.997Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "user": "monitor"} 2025-10-02T03:28:40.016Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "user": "monitor"} 2025-10-02T03:28:40.016Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "user": "xtrabackup"} 2025-10-02T03:28:40.032Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "user": "xtrabackup"} 2025-10-02T03:28:40.049Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-10-02T03:28:40.067Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", 
"reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "user": "xtrabackup"} 2025-10-02T03:28:40.067Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "user": "replication"} 2025-10-02T03:28:40.094Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "user": "replication"} 2025-10-02T03:28:40.111Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "secret": "some-name-mysql-init", "user": "replication"} 2025-10-02T03:28:40.138Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "user": "replication"} 2025-10-02T03:28:40.138Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "user": "proxyadmin"} 2025-10-02T03:28:40.173Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "user": "proxyadmin"} 2025-10-02T03:28:40.191Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "user": "proxyadmin"} 2025-10-02T03:28:40.191Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "last-applied-secret": "74e9355bccd365ca4c4dd3dcf6f20f2ca6e48db7d64658aef1dd92c8181d13d8"} 2025-10-02T03:28:40.191Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "last-applied-secret": "74e9355bccd365ca4c4dd3dcf6f20f2ca6e48db7d64658aef1dd92c8181d13d8"} 2025-10-02T03:28:40.195Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-02T03:28:40.310Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-02T03:28:42.089Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "636eada7-7132-48f6-b31e-77a86c2c4960", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / [… identical ProxySQL access-denied output elided; duplicates the error field above …]\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-02T03:30:25.402Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "85c0ae66-fcb0-4372-836a-3c5d4a73a2d9", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.245.144.70:33062: connect: connection refused"}
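[Editor's note: the sync users error above is the in-pod syncusers script being refused on the ProxySQL admin interface while the proxyadmin password rotates. A minimal sketch of the same check done by hand, assuming a mysql client inside the proxysql container; the secret name internal-some-name and admin port 6032 are taken from this log:]

# Read the current proxyadmin password from the operator's internal secret,
# then attempt the same local admin-port login that syncusers performs.
PROXYADMIN_PASS=$(kubectl -n users-28439 get secret internal-some-name \
  -o jsonpath='{.data.proxyadmin}' | base64 -d)
kubectl -n users-28439 exec some-name-proxysql-0 -c proxysql -- \
  mysql -h127.0.0.1 -P6032 -uproxyadmin -p"$PROXYADMIN_PASS" -e 'SELECT 1'

2025-10-02T03:30:30.573Z INFO Unable to find primary pod for replication.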
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "62488521-1204-4703-a3cd-cabd5d3abd76", "primary name": "some-name-pxc-0.some-name-pxc.users-28439.svc.cluster.local"} 2025-10-02T03:30:35.737Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "3427e7a8-d241-4867-8d0f-7556d202f714", "primary name": "some-name-pxc-0.some-name-pxc.users-28439.svc.cluster.local"} 2025-10-02T03:30:40.961Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "0db21f28-31ee-43d0-925c-0fb0433c73f3", "primary name": "some-name-pxc-0.some-name-pxc.users-28439.svc.cluster.local"} 2025-10-02T03:30:46.104Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "d2b863e4-45a3-45a0-b624-34d7db1a9369", "primary name": "some-name-pxc-0.some-name-pxc.users-28439.svc.cluster.local"} 2025-10-02T03:30:51.241Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "5d05879a-9e4e-45d5-b189-78667b10f5c7", "primary name": "some-name-pxc-0.some-name-pxc.users-28439.svc.cluster.local"} 2025-10-02T03:30:56.413Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "6aacb558-60c5-4d07-b85b-7b4ee9d23678", "primary name": "some-name-pxc-0.some-name-pxc.users-28439.svc.cluster.local"} 2025-10-02T03:31:01.573Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "76cba69a-8966-45e9-8f82-46418b6e1f27", "primary name": "some-name-pxc-0.some-name-pxc.users-28439.svc.cluster.local"} 2025-10-02T03:31:09.470Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "44d5dbd8-0825-4c7e-8602-8771f4166906"} 2025-10-02T03:31:14.381Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "e44c3d25-74ff-48e6-8b10-18d4029cf6da"} 2025-10-02T03:31:15.106Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "8a60027d-b6ae-4341-877c-6e79bf9d370b", "user": "operator"} 2025-10-02T03:31:15.123Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "8a60027d-b6ae-4341-877c-6e79bf9d370b", "user": "operator"} 2025-10-02T03:31:15.138Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "8a60027d-b6ae-4341-877c-6e79bf9d370b", "secret": "some-name-mysql-init", "user": "operator"} 2025-10-02T03:31:15.153Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "8a60027d-b6ae-4341-877c-6e79bf9d370b", "user": "operator"} 2025-10-02T03:31:15.153Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "8a60027d-b6ae-4341-877c-6e79bf9d370b", "last-applied-secret": "fc3bec314a15001cda6175b0bcd68024caed8c2994a816376b93215ceac1853b"} 2025-10-02T03:31:15.158Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "8a60027d-b6ae-4341-877c-6e79bf9d370b", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-02T03:31:18.407Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "8a60027d-b6ae-4341-877c-6e79bf9d370b", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' 
(using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.' (using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / [… identical access-denied output elided; duplicates the error field above …]\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28439.svc.'
(using password: YES)\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-02T03:32:04.730Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "16c03fd7-6637-457b-9654-4c818543bf0b"} 2025-10-02T03:32:09.460Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "ecff0fc5-6da6-4255-8dc1-c81fe05e82d7"} 2025-10-02T03:32:14.090Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "7e93e657-1f8e-43cf-8c7b-e1ad8331345b"} 2025-10-02T03:32:19.362Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "2b674bc9-3b85-4bc3-bc0b-4b79304aad17"} 2025-10-02T03:32:24.963Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "3a9bbd16-fbd2-423d-ab96-27cd4a2428a8"} 2025-10-02T03:32:30.292Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "27c4168b-7dd5-4f08-a8a1-8d7e7c7d4794"} 2025-10-02T03:32:35.907Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "71bd5b2f-f62b-454c-a9ad-628a1a1decaa"} 2025-10-02T03:32:40.999Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "79fa9abe-fda7-4203-abfd-0fcad5fddd60"} 2025-10-02T03:32:46.107Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "8963b0a8-e6a9-4dbc-84a4-bcdb362cb4e5"} 2025-10-02T03:32:51.377Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "080c7a83-d33a-443b-9fcb-2545b600eab7"} 2025-10-02T03:32:56.369Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "e1771db5-f400-43c3-9903-2d991f9789bd"} 2025-10-02T03:33:01.595Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "f13a8edd-f7ea-4a92-8710-dff04aed8ef5"} 2025-10-02T03:33:06.890Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "b85ec490-d445-4046-af94-8caca2936d27"} 2025-10-02T03:33:12.308Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "de4c146d-f4cd-4e7a-a01b-f675794f1ef6"} 2025-10-02T03:33:17.467Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": 
"269bafa1-4672-4be8-8271-faaa18e4eb46"} 2025-10-02T03:33:19.895Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c902edbb-ce59-41d8-a62a-168abfdbd63d", "user": "root"} 2025-10-02T03:33:19.916Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c902edbb-ce59-41d8-a62a-168abfdbd63d", "user": "root"} 2025-10-02T03:33:19.934Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c902edbb-ce59-41d8-a62a-168abfdbd63d", "secret": "some-name-mysql-init", "user": "root"} 2025-10-02T03:33:22.382Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c902edbb-ce59-41d8-a62a-168abfdbd63d"} 2025-10-02T03:33:22.402Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c902edbb-ce59-41d8-a62a-168abfdbd63d", "user": "root"} 2025-10-02T03:33:22.402Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c902edbb-ce59-41d8-a62a-168abfdbd63d", "user": "monitor"} 2025-10-02T03:33:22.416Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c902edbb-ce59-41d8-a62a-168abfdbd63d", "user": "monitor"} 2025-10-02T03:33:22.434Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c902edbb-ce59-41d8-a62a-168abfdbd63d", "secret": "some-name-mysql-init", "user": "monitor"} 2025-10-02T03:33:22.466Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c902edbb-ce59-41d8-a62a-168abfdbd63d", "user": "monitor"} 2025-10-02T03:33:22.486Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c902edbb-ce59-41d8-a62a-168abfdbd63d", "user": "monitor"} 2025-10-02T03:33:22.486Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c902edbb-ce59-41d8-a62a-168abfdbd63d", "user": "xtrabackup"} 2025-10-02T03:33:22.512Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c902edbb-ce59-41d8-a62a-168abfdbd63d", "user": "xtrabackup"} 2025-10-02T03:33:22.528Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c902edbb-ce59-41d8-a62a-168abfdbd63d", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-10-02T03:33:22.546Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c902edbb-ce59-41d8-a62a-168abfdbd63d", "user": "xtrabackup"} 2025-10-02T03:33:22.546Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c902edbb-ce59-41d8-a62a-168abfdbd63d", "user": "proxyadmin"} 2025-10-02T03:33:22.579Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c902edbb-ce59-41d8-a62a-168abfdbd63d", "user": "proxyadmin"} 2025-10-02T03:33:22.604Z INFO 
Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c902edbb-ce59-41d8-a62a-168abfdbd63d", "user": "proxyadmin"} 2025-10-02T03:33:22.604Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c902edbb-ce59-41d8-a62a-168abfdbd63d", "last-applied-secret": "8b78fb704eec99bcbefffc78aec63950b60ddf5ea272efd638c71cb7fbd0cc9e"} 2025-10-02T03:33:22.604Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c902edbb-ce59-41d8-a62a-168abfdbd63d", "last-applied-secret": "8b78fb704eec99bcbefffc78aec63950b60ddf5ea272efd638c71cb7fbd0cc9e"} 2025-10-02T03:33:22.607Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c902edbb-ce59-41d8-a62a-168abfdbd63d", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-02T03:33:22.681Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c902edbb-ce59-41d8-a62a-168abfdbd63d", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-02T03:33:25.061Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c902edbb-ce59-41d8-a62a-168abfdbd63d", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\nERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:339) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n[… four more identical repetitions elided; duplicates of the error field above …]\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:966\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:867\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1700"} 2025-10-02T03:35:17.992Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "b76c3ed3-4f04-45ba-a1c6-4875baa7b0ba", "primary name": "some-name-pxc-0.some-name-pxc.users-28439.svc.cluster.local"} 2025-10-02T03:35:23.246Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "eb5d6099-2c90-42a7-9813-7d80cc6a4ed1", "primary name": "some-name-pxc-0.some-name-pxc.users-28439.svc.cluster.local"} 2025-10-02T03:35:28.504Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "857bb3a8-2444-4388-b0da-ee74b6c41e1b", "primary name": "some-name-pxc-0.some-name-pxc.users-28439.svc.cluster.local"} 2025-10-02T03:35:33.658Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "b55cdd34-592b-4840-a1c9-c2054bf9cd89", "primary name": "some-name-pxc-0.some-name-pxc.users-28439.svc.cluster.local"} 2025-10-02T03:35:38.815Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "50797c9c-8872-483f-9808-d9040e8be2d0", "primary name": "some-name-pxc-0.some-name-pxc.users-28439.svc.cluster.local"}
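[Editor's note: at this point the operator user is refused from the proxysql pods and the controller cannot reach the PXC admin port it dials above (33062). A minimal sketch of an equivalent manual probe, assuming a mysql client inside the proxysql container; the host, the 33062 admin port, and the internal-some-name secret key operator all come from this log:]

# Read the current operator password, then connect to the PXC admin port
# from a proxysql pod, i.e. from the same source host the denials name.
OPERATOR_PASS=$(kubectl -n users-28439 get secret internal-some-name \
  -o jsonpath='{.data.operator}' | base64 -d)
kubectl -n users-28439 exec some-name-proxysql-0 -c proxysql -- \
  mysql -hsome-name-pxc-0.some-name-pxc.users-28439.svc.cluster.local \
  -P33062 -uoperator -p"$OPERATOR_PASS" -e 'SELECT @@hostname'

2025-10-02T03:35:43.959Z INFO Unable to find primary pod for replication.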
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c860dea7-3c65-4895-9ef2-fe0139a03bc8", "primary name": "some-name-pxc-0.some-name-pxc.users-28439.svc.cluster.local"} 2025-10-02T03:35:49.108Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "4e7e96a9-ea38-4a9e-9bc5-9119b33f941f", "primary name": "some-name-pxc-0.some-name-pxc.users-28439.svc.cluster.local"} 2025-10-02T03:35:56.800Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "a0093086-c5dc-4b76-bcd6-5ba825d1f4d8"} 2025-10-02T03:35:58.708Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "user": "root"} 2025-10-02T03:35:58.729Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "user": "root"} 2025-10-02T03:35:58.746Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "secret": "some-name-mysql-init", "user": "root"} 2025-10-02T03:35:58.763Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "user": "root"} 2025-10-02T03:35:58.763Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "user": "operator"} 2025-10-02T03:35:58.777Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "user": "operator"} 2025-10-02T03:35:58.792Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "secret": "some-name-mysql-init", "user": "operator"} 2025-10-02T03:35:58.831Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "user": "operator"} 2025-10-02T03:35:58.831Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "user": "monitor"} 2025-10-02T03:35:58.848Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "user": "monitor"} 2025-10-02T03:35:58.864Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "secret": "some-name-mysql-init", "user": "monitor"} 2025-10-02T03:35:58.880Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "user": "monitor"} 2025-10-02T03:35:58.880Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28439", "name": 
"some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "user": "xtrabackup"} 2025-10-02T03:35:58.903Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "user": "xtrabackup"} 2025-10-02T03:35:58.922Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2025-10-02T03:35:58.938Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "user": "xtrabackup"} 2025-10-02T03:35:58.938Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "user": "replication"} 2025-10-02T03:35:58.951Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "user": "replication"} 2025-10-02T03:35:58.967Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "secret": "some-name-mysql-init", "user": "replication"} 2025-10-02T03:35:58.983Z INFO HAProxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "last-applied-secret": "52a49964e357907654c2ad0095880b1de796df3cac739a92b64aa2fa033577cc"} 2025-10-02T03:35:58.983Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "user": "replication"} 2025-10-02T03:35:58.983Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "last-applied-secret": "52a49964e357907654c2ad0095880b1de796df3cac739a92b64aa2fa033577cc"} 2025-10-02T03:35:58.984Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-02T03:35:59.030Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-10-02T03:35:59.079Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-02T03:35:59.155Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-10-02T03:35:59.257Z DEBUG Creating object {"controller": "pxc-controller", "namespace": 
"users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-02T03:35:59.345Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "object": "some-name-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-10-02T03:36:02.762Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "43d6ec90-cb1f-40fd-8b91-7c176f0b22aa", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.237.162:3306: connect: connection refused"} 2025-10-02T03:36:03.537Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "1e62391a-c160-43cf-a133-101653d2d2f4", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-10-02T03:38:37.421Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c4496bd1-5cd1-42c7-a360-e97808d4d9b8", "user": "monitor"} 2025-10-02T03:38:37.437Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c4496bd1-5cd1-42c7-a360-e97808d4d9b8", "user": "monitor"} 2025-10-02T03:38:37.458Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c4496bd1-5cd1-42c7-a360-e97808d4d9b8", "secret": "some-name-mysql-init", "user": "monitor"} 2025-10-02T03:38:37.486Z INFO HAProxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c4496bd1-5cd1-42c7-a360-e97808d4d9b8", "last-applied-secret": "bf77ecd194036eeb3b0dd030147d2ae19aa930312d5b2e4820a882e1c50ff338"} 2025-10-02T03:38:37.486Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c4496bd1-5cd1-42c7-a360-e97808d4d9b8", "user": "monitor"} 2025-10-02T03:38:37.488Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-28439", "name": "some-name", "reconcileID": "c4496bd1-5cd1-42c7-a360-e97808d4d9b8", "object": "some-name-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}    ... // 22 identical fields    ... // 2 identical fields    ... // 2 identical fields    ... // 2 identical fields    ... // 2 identical fields    ... // 3 identical elements    ... // 3 identical fields    ... // 3 identical fields    ... // 3 identical fields    "4e",    ... // 4 identical fields +  "52a49964e357907654c2ad0095880b1de796df3cac739a92b64aa2fa033577cc", -  "52a49964e357907654c2ad0095880b1de796df3cac739a92b64aa2fa033577cc",    ... // 5 identical fields    ... // 5 identical fields    ... // 5 identical fields    ... // 6 identical fields    ... // 6 identical fields    "7", +  "74e9355bccd365ca4c4dd3dcf6f20f2ca6e48db7d64658aef1dd92c8181d13d8", -  "74e9355bccd365ca4c4dd3dcf6f20f2ca6e48db7d64658aef1dd92c8181d13d8",    ... // 7 identical fields +  "8b", +  "8b78fb704eec99bcbefffc78aec63950b60ddf5ea272efd638c71cb7fbd0cc9e", -  "8b78fb704eec99bcbefffc78aec63950b60ddf5ea272efd638c71cb7fbd0cc9e", +  "8fb70",    ... 
// 8 identical fields -  "9355bccd365ca4c4dd3dcf6f20f2ca6e48db7d64658aef1dd92c8181d13d8",    ... // 9 identical fields    ... // 9 identical fields    AccessModes: nil,    ActiveDeadlineSeconds: nil,    Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "haproxy", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},    Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},    Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}}, +  Annotations: map[string]string{ -  Annotations: map[string]string{    Annotations: map[string]string{ +  APIVersion: "", -  APIVersion: "apps/v1", -  APIVersion: "apps/v1", -  APIVersion: "v1",    Args: {"haproxy"},    Args: {"mysqld"},    Args: {"proxysql", "-f", "-c", "/etc/proxysql/proxysql.cnf", ...}, -  Args: []string{"logrotate"},    AutomountServiceAccountToken: nil, +  AvailableReplicas: 0, -  AvailableReplicas: 2, -  AvailableReplicas: 3,    AWSElasticBlockStore: nil,    AzureFile: nil, +  "b3c2cd80a6cc9954421754bd6de3b751772abee71881fdbdf0560f43c810e500", -  "b3c2cd80a6cc9954421754bd6de3b751772abee71881fdbdf0560f43c810e500", +  "bf77ecd194036eeb3b0dd030147d2ae19aa930312d5b2e4820a882e1c50ff338", -  "c93c64c276d4323c2ab792ca37c8cc4435032a4f0003313ec9b8419436a4ac0e",    Capacity: nil, -  CollisionCount: &0, +  CollisionCount: nil,    Conditions: nil,    ConfigMapKeyRef: nil,    ConfigMap: &v1.ConfigMapVolumeSource{    ContainerPort: 3306,    ContainerPort: 33060,    ContainerPort: 33062,    ContainerPort: 3307,    ContainerPort: 3309,    ContainerPort: 4444,    ContainerPort: 4567,    ContainerPort: 4568,    ContainerPort: 6032,    ContainerPort: 6070,    ContainerPort: 8404,    Containers: []v1.Container{ +  CreationTimestamp: v1.Time{}, -  CreationTimestamp: v1.Time{Time: s"2025-10-02 03:16:47 +0000 UTC"}, -  CreationTimestamp: v1.Time{Time: s"2025-10-02 03:35:59 +0000 UTC"}, +  CurrentReplicas: 0, -  CurrentReplicas: 2, -  CurrentReplicas: 3, +  CurrentRevision: "", -  CurrentRevision: "some-name-haproxy-5dfb9d8c7", -  CurrentRevision: "some-name-proxysql-54877777b5", -  CurrentRevision: "some-name-proxysql-57899f475c", -  CurrentRevision: "some-name-proxysql-584c4c6b74", -  CurrentRevision: "some-name-proxysql-7747cd479f", -  CurrentRevision: "some-name-proxysql-7765964b77", -  CurrentRevision: "some-name-proxysql-867976c455", -  CurrentRevision: "some-name-pxc-5f8b99759c", -  CurrentRevision: "some-name-pxc-67c98bff64", -  CurrentRevision: "some-name-pxc-6bd7b6855d", -  CurrentRevision: "some-name-pxc-854f47c984",    DataSource: nil,    DataSourceRef: nil, -  "dd79cd08d55481e66b3df9f2b1060fc5062722e9eef6f83f40c4772f8c9a945a", -  DefaultMode: &420, -  DefaultMode: &420, +  DefaultMode: nil, +  
DefaultMode: nil,    DeletionGracePeriodSeconds: nil,    DeletionGracePeriodSeconds: nil,    DeletionTimestamp: nil, +  DeprecatedServiceAccount: "", -  DeprecatedServiceAccount: "default", +  DNSPolicy: "", -  DNSPolicy: "ClusterFirst", +  "ec99bcbefffc78aec63950b60ddf5ea272efd638c71cb7fbd0cc9e",    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-haproxy"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-proxysql"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-pxc"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-log-collector"}, Optional: &true}}}, -  EnvFrom: []v1.EnvFromSource{{SecretRef: s"&SecretEnvSource{LocalObjectRefe"...}},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "LIVENESS_CHECK_TIMEOUT", Value: "5"}, {Name: "READINESS_CHECK_TIMEOUT", Value: "1"}},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "OPERATOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "proxyadmin"}}}, ...},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc-unready"}, {Name: "MONITOR_HOST", Value: "%"}, {Name: "MYSQL_ROOT_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "root"}}}, {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, ...}, -  Env: []v1.EnvVar{    Env: []v1.EnvVar{    EphemeralContainers: nil,    FailureThreshold: 3, +  "fc3bec314a15001cda6175b0bcd68024caed8c2994a816376b93215ceac1853b", -  "fc3bec314a15001cda6175b0bcd68024caed8c2994a816376b93215ceac1853b",    FC: nil,    FieldPath: "metadata.name",    FieldPath: "metadata.namespace",    FieldRef: &v1.ObjectFieldSelector{ -  FieldsType: "FieldsV1", -  FieldsType: "FieldsV1", -  FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`..., -  FieldsV1: s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`...,    Finalizers: nil,    Finalizers: nil, +  Generation: 0, -  Generation: 1, -  Generation: 2, -  Generation: 3, -  Generation: 4, -  Generation: 5, -  Generation: 6, -  Generation: 7, -  Generation: 8, github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1    GitRepo: nil, /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:296 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:421 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.22.1/pkg/internal/controller/controller.go:474 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:869    HostAliases: nil,    HostAliases: nil,    HostIP: "",    HostIPC: false,    Hostname: "",    HostPort: 0, -  Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", -  Image: "perconalab/percona-xtradb-cluster-operator:main-logcollector", -  ImagePullPolicy: "Always",    ImagePullPolicy: "Always",    ImagePullSecrets: nil,    InitContainers: []v1.Container{    InitialDelaySeconds: 300,    ISCSI: nil,    Items: nil,    Items: nil,    "kubectl.kubernetes.io/default-container": "haproxy",    
"kubectl.kubernetes.io/default-container": "proxysql",    "kubectl.kubernetes.io/default-container": "pxc",    Labels: {"app.kubernetes.io/component": "haproxy", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},    Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},    Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},    Labels: nil, +  "last-applied-secret": "52a49964e357907654c2ad0095880b1de796df3cac739a92b64aa2fa033577cc", +  "last-applied-secret": "c93c64c276d4323c2ab792ca37c8cc4435032a4f0003313ec9b8419436a4ac0e", +  "last-applied-secret": "dd79cd08d55481e66b3df9f2b1060fc5062722e9eef6f83f40c4772f8c9a945a",    "last-applied-secret": strings.Join({    Lifecycle: nil,    LivenessProbe: &v1.Probe{    LocalObjectReference: {Name: "auto-some-name-pxc"},    LocalObjectReference: {Name: "some-name-haproxy"},    LocalObjectReference: {Name: "some-name-pxc"}, +  ManagedFields: nil,    ManagedFields: nil, -  ManagedFields: []v1.ManagedFieldsEntry{ -  Manager: "kube-controller-manager", -  Manager: "percona-xtradb-cluster-operator",    MinReadySeconds: 0, [mysql] 2025/10/02 03:38:02 packets.go:58 read tcp 10.245.144.65:39504->34.118.237.162:3306: i/o timeout [mysql] 2025/10/02 03:38:40 packets.go:58 unexpected EOF [mysql] 2025/10/02 03:38:46 packets.go:58 unexpected EOF [mysql] 2025/10/02 03:38:53 packets.go:58 unexpected EOF [mysql] 2025/10/02 03:38:54 packets.go:58 unexpected EOF [mysql] 2025/10/02 03:38:55 packets.go:58 unexpected EOF    Name: "auto-config",    {Name: "bin", VolumeSource: {EmptyDir: &{}}},    {Name: "CLUSTER_HASH", Value: "3709004"},    Name: "config",    Name: "DEFAULT_AUTHENTICATION_PLUGIN",    {Name: "haproxy-auto", VolumeSource: {EmptyDir: &{}}},    Name: "haproxy-custom", -  {Name: "IS_LOGCOLLECTOR", Value: "yes"},    Name: "ist",    {Name: "LIVENESS_CHECK_TIMEOUT", Value: "5"}, -  {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"},    {Name: "LOG_DATA_DIR", Value: "/var/lib/mysql"}, -  Name: "logrotate", -  Name: "logs",    {Name: "MONITOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "monitor"}}}, -  {Name: "MONITOR_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...},    Name: "mysql",    Name: "mysql-admin",    Name: "mysql-init-file",    {Name: "MYSQL_NOTIFY_SOCKET", Value: "/var/lib/mysql/notify.sock"},    Name: "mysql-replicas",    {Name: "MYSQL_STATE_FILE", Value: "/var/lib/mysql/mysql.state"},    Name: "mysql-users-secret-file",    Name: "mysqlx",    {Name: "OPERATOR_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}},    Name: "POD_NAME",    Name: "POD_NAMESPASE", -  {Name: "POD_NAMESPASE", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...}, -  {Name: "POD_NAME", ValueFrom: s"&EnvVarSource{FieldRef:&ObjectFi"...},    Name: "proxyadm",    Name: "proxy-protocol",    {Name: "READINESS_CHECK_TIMEOUT", Value: "15"}, -  {Name: "SERVICE_TYPE", Value: "mysql"},    Name: "some-name-env-vars-haproxy",    Namespace: "users-28439",    Name: "ssl",    Name: "ssl-internal",    Name: "sst",    Name: "stats",    
{Name: "tmp", VolumeSource: {EmptyDir: &{}}},    Name: "vault-keyring-secret",    Name: "write-set",    {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}},    NFS: nil,    NodeName: "",    NodeSelector: nil,    ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "last-applied-secret": "dd79cd08d55481e66b3df9f2b1060fc5062722e9eef6f83f40c4772f8c9a945a", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}},    ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}},    ObjectMeta: {Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "pxc", "last-applied-secret": "8b78fb704eec99bcbefffc78aec63950b60ddf5ea272efd638c71cb7fbd0cc9e", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", ...}},    ObjectMeta: {Name: "datadir", Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: {Name: "proxydata", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: v1.ObjectMeta{    ObjectMeta: v1.ObjectMeta{ +  ObservedGeneration: 0, -  ObservedGeneration: 1, -  ObservedGeneration: 2, -  ObservedGeneration: 3, -  ObservedGeneration: 4, -  ObservedGeneration: 5, -  ObservedGeneration: 6, -  ObservedGeneration: 7, -  ObservedGeneration: 8, -  Operation: "Update", -  Operation: "Update",    Optional: &false,    Optional: &true,    Optional: &true,    Ordinals: nil,    OS: nil,    Overhead: nil,    OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name", UID: "68404465-0ddf-4f04-9cd9-a30f5ae0f858", ...}},    OwnerReferences: nil,    "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e",    "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNTJhNDk5NjRlMzU3OTA3NjU0YzJhZDAwOTU4ODBiMWRlNzk2ZGYzY2FjNzM5YTkyYjY0YWEyZmEwMzM1NzdjYyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNTJhNDk5NjRlMzU3OTA3NjU0YzJhZDAwOTU4ODBiMWRlNzk2ZGYzY2FjNzM5YTkyYjY0YWEyZmEwMzM1NzdjYyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNzRlOTM1NWJjY2QzNjVjYTRjNGRkM2RjZjZmMjBmMmNhNmU0OGRiN2Q2NDY1OGFlZjFkZDkyYzgxODFkMTNkOCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNzRlOTM1NWJjY2QzNjVjYTRjNGRkM2RjZjZmMjBmMmNhNmU0OGRiN2Q2NDY1OGFlZjFkZDkyYzgxODFkMTNkOCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOGI3OGZiNzA0ZWVjOTliY2JlZmZmYzc4YWVjNjM5NTBiNjBkZGY1ZWEyNzJlZmQ2MzhjNzFjYjdmYmQwY2M5ZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYjNjMmNkODBhNmNjOTk1NDQyMTc1NGJkNmRlM2I3NTE3NzJhYmVlNzE4ODFmZGJkZjA1NjBmNDNjODEwZTUwMCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYjNjMmNkODBhNmNjOTk1NDQyMTc1NGJkNmRlM2I3NTE3NzJhYmVlNzE4ODFmZGJkZjA1NjBmNDNjODEwZTUwMCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZGQ3OWNkMDhkNTU0ODFlNjZiM2RmOWYyYjEwNjBmYzUwNjI3MjJlOWVlZjZmODNmNDBjNDc3MmY4YzlhOTQ1YSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZmMzYmVjMzE0YTE1MDAxY2RhNjE3NWIwYmNkNjgwMjRjYWVkOGMyOTk0YTgxNjM3NmI5MzIxNWNlYWMxODUzYiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZmMzYmVjMzE0YTE1MDAxY2RhNjE3NWIwYmNkNjgwMjRjYWVkOGMyOTk0YTgxNjM3NmI5MzIxNWNlYWMxODUzYiIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6ImhhcHJveHkiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNTJhNDk5NjRlMzU3OTA3NjU0YzJhZDAwOTU4ODBiMWRlNzk2ZGYzY2FjNzM5YTkyYjY0YWEyZmEwMzM1NzdjYyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6ImhhcHJveHkiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNTJhNDk5NjRlMzU3OTA3NjU0YzJhZDAwOTU4ODBiMWRlNzk2ZGYzY2FjNzM5YTkyYjY0YWEyZmEwMzM1NzdjYyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6ImhhcHJveHkiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYmY3N2VjZDE5NDAzNmVlYjNiMGRkMDMwMTQ3ZDJhZTE5YWE5MzAzMTJkNWIyZTQ4MjBhODgyZTFjNTBmZjMzOCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoiaGFwcm94eSIsImFwcC5rdWJlcm5ldGVzLmlvL2luc3RhbmNlIjoic29tZS1uYW1lIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6ImhhcHJveHkiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSJ9fSwic3BlYyI6eyJ2b2x1bWVzIjpbeyJuYW1lIjoiaGFwcm94eS1jdXN0b20iLCJjb25maWdNYXAi"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNTJhNDk5NjRlMzU3OTA3NjU0YzJhZDAwOTU4ODBiMWRlNzk2ZGYzY2FjNzM5YTkyYjY0YWEyZmEwMzM1NzdjYyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNzRlOTM1NWJjY2QzNjVjYTRjNGRkM2RjZjZmMjBmMmNhNmU0OGRiN2Q2NDY1OGFlZjFkZDkyYzgxODFkMTNkOCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiNzRlOTM1NWJjY2QzNjVjYTRjNGRkM2RjZjZmMjBmMmNhNmU0OGRiN2Q2NDY1OGFlZjFkZDkyYzgxODFkMTNkOCIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOGI3OGZiNzA0ZWVjOTliY2JlZmZmYzc4YWVjNjM5NTBiNjBkZGY1ZWEyNzJlZmQ2MzhjNzFjYjdmYmQwY2M5ZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOGI3OGZiNzA0ZWVjOTliY2JlZmZmYzc4YWVjNjM5NTBiNjBkZGY1ZWEyNzJlZmQ2MzhjNzFjYjdmYmQwY2M5ZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOGI3OGZiNzA0ZWVjOTliY2JlZmZmYzc4YWVjNjM5NTBiNjBkZGY1ZWEyNzJlZmQ2MzhjNzFjYjdmYmQwY2M5ZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMjAzLTA3YjQzNTZmIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUi
OiJsb2dzIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1sb2djb2xsZWN0b3IiLCJlbnZGcm9tIjpbeyJzZWNyZXRSZWYiOnsibmFtZSI6InNvbWUtbmFtZS1sb2ctY29sbGVjdG9yIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IkxPR19EQVRBX0RJUiIsInZhbHVlIjoiL3Zhci9saWIvbXlzcWwifSx7Im5hbWUiOiJQT0RfTkFNRVNQQVNFIiwidmFsdWVGcm9tIjp7ImZpZWxkUmVmIjp7ImZpZWxkUGF0aCI6Im1ldGFkYXRhLm5hbWVzcGFjZSJ9fX0seyJuYW1lIjoiUE9EX05BTUUiLCJ2YWx1ZUZyb20iOnsiZmllbGRSZWYiOnsiZmllbGRQYXRoIjoibWV0YWRhdGEubmFtZSJ9fX1dLCJyZXNvdXJjZXMiOnt9LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJkYXRhZGlyIiwibW91bnRQYXRoIjoiL3Zhci9saWIvbXlzcWwifV0sImltYWdlUHVsbFBvbGljeSI6IkFsd2F5cyJ9LHsibmFtZSI6ImxvZ3JvdGF0ZSIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOm1haW4tbG9nY29sbGVjdG9yIiwiYXJncyI6WyJsb2dyb3RhdGUiXSwiZW52IjpbeyJuYW1lIjoiU0VSVklDRV9UWVBFIiwidmFsdWUiOiJteXNxbCJ9LHsibmFtZSI6Ik1PTklUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJtb25pdG9yIn19fV0sInJlc291cmNlcyI6e30sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImRhdGFkaXIiLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHhjIiwiaW1hZ2UiOiJwZXJjb25hbGFiL3BlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3I6bWFpbi1weGM1LjciLCJjb21tYW5kIjpbIi92YXIvbGliL215c3FsL3B4Yy1lbnRyeXBvaW50LnNoIl0sImFyZ3MiOlsibXlzcWxkIl0sInBvcnRzIjpbeyJuYW1lIjoibXlzcWwiLCJjb250YWluZXJQb3J0IjozMzA2fSx7Im5hbWUiOiJzc3QiLCJjb250YWluZXJQb3J0Ijo0NDQ0fSx7Im5hbWUiOiJ3cml0ZS1zZXQiLCJjb250YWluZXJQb3J0Ijo0NTY3fSx7Im5hbWUiOiJpc3QiLCJjb250YWluZXJQb3J0Ijo0NTY4fSx7Im5hbWUiOiJteXNxbC1hZG1pbiIsImNvbnRhaW5lclBvcnQiOjMzMDYyfSx7Im5h"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiOGI3OGZiNzA0ZWVjOTliY2JlZmZmYzc4YWVjNjM5NTBiNjBkZGY1ZWEyNzJlZmQ2MzhjNzFjYjdmYmQwY2M5ZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJjZmQ2YTUyMzk4MjY4MTczYjUxZDdjZGIxMzMxYzA5YSIsInBlcmNvbmEuY29tL3NzbC1pbnRlcm5hbC1oYXNoIjoiNzUxZTU5YTFjMWQ5NGFjNjdmOWQ2YmI2ZTc1NmFjZGMifX0sInNwZWMiOnsidm9sdW1lcyI6W3sibmFtZSI6InRtcCIsImVtcHR5RGlyIjp7fX0seyJuYW1lIjoiY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJzb21lLW5hbWUtcHhjIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImF1dG8tY29uZmlnIiwiY29uZmlnTWFwIjp7Im5hbWUiOiJhdXRvLXNvbWUtbmFtZS1weGMiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InZhdWx0LWtleXJpbmctc2VjcmV0Iiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJzb21lLW5hbWUtdmF1bHQiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6Im15c3FsLXVzZXJzLXNlY3JldC1maWxlIiwic2VjcmV0Ijp7InNlY3JldE5hbWUiOiJpbnRlc
m5hbC1zb21lLW5hbWUiLCJvcHRpb25hbCI6ZmFsc2V9fSx7Im5hbWUiOiJteXNxbC1pbml0LWZpbGUiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1teXNxbC1pbml0Iiwib3B0aW9uYWwiOnRydWV9fV0sImluaXRDb250YWluZXJzIjpbeyJuYW1lIjoicHhjLWluaXQiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjpQUi0yMjAzLTA3YjQzNTZmIiwiY29tbWFuZCI6WyIvcHhjLWluaXQtZW50cnlwb2ludC5zaCJdLCJyZXNvdXJjZXMiOnsibGltaXRzIjp7ImNwdSI6IjUwbSIsIm1lbW9yeSI6IjUwTSJ9fSwidm9sdW1lTW91bnRzIjpbeyJuYW1lIjoiZGF0YWRpciIsIm1vdW50UGF0aCI6Ii92YXIvbGliL215c3FsIn1dLCJpbWFnZVB1bGxQb2xpY3kiOiJBbHdheXMifV0sImNvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXB4YzUuNyIsImNvbW1hbmQiOlsiL3Zhci9saWIvbXlzcWwvcHhjLWVudHJ5cG9pbnQuc2giXSwiYXJncyI6WyJteXNxbGQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InNzdCIsImNvbnRhaW5lclBvcnQiOjQ0NDR9LHsibmFtZSI6IndyaXRlLXNldCIsImNvbnRhaW5lclBvcnQiOjQ1Njd9LHsibmFtZSI6ImlzdCIsImNvbnRhaW5lclBvcnQiOjQ1Njh9LHsibmFtZSI6Im15c3FsLWFkbWluIiwiY29udGFpbmVyUG9ydCI6MzMwNjJ9LHsibmFtZSI6Im15c3FseCIsImNvbnRhaW5lclBvcnQiOjMzMDYwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXB4YyIsIm9wdGlvbmFsIjp0cnVlfX1dLCJlbnYiOlt7Im5hbWUiOiJQWENfU0VSVklDRSIsInZhbHVlIjoic29tZS1uYW1lLXB4Yy11bnJlYWR5In0seyJuYW1lIjoiTU9OSVRPUl9IT1NUIiwidmFsdWUiOiIlIn0seyJuYW1lIjoiTVlTUUxfUk9PVF9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6InJvb3QifX19LHsibmFtZSI6IlhUUkFCQUNLVVBfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJ4dHJhYmFja3VwIn19fSx7Im5hbWUiOiJNT05JVE9SX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoibW9uaXRvciJ9fX0seyJuYW1lIjoiQ0xVU1RFUl9IQVNIIiwidmFsdWUiOiIzNzA5MDA0In0seyJuYW1lIjoiT1BFUkFUT1JfQURNSU5fUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiTElWRU5FU1NfQ0hFQ0tfVElNRU9VVCIsInZhbHVlIjoiNSJ9LHsibmFtZSI6IlJFQURJTkVTU19DSEVDS19USU1FT1VUIiwidmFsdWUiOiIxNSJ9LHsibmFtZSI6IkRFRkFVTFRfQVVUSEVOVElDQVRJT05fUExVR0lOIiwidmFsdWUiOiJjYWNoaW5nX3NoYTJfcGFzc3dvcmQifSx7Im5hbWUiOiJNWVNR"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYzkzYzY0YzI3NmQ0MzIzYzJhYjc5MmNhMzdjOGNjNDQzNTAzMmE0ZjAwMDMzMTNlYzliODQxOTQzNmE0YWMwZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiYzkzYzY0YzI3NmQ0MzIzYzJhYjc5MmNhMzdjOGNjNDQzNTAzMmE0ZjAwMDMzMTNlYzliODQxOTQzNmE0YWMwZSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIy"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifX0sInRlbXBsYXRlIjp7Im1ldGFkYXRhIjp7ImxhYmVscyI6eyJhcHAua3ViZXJuZXRlcy5pby9jb21wb25lbnQiOiJweGMiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9LCJhbm5vdGF0aW9ucyI6eyJrdWJlY3RsLmt1YmVybmV0ZXMuaW8vZGVmYXVsdC1jb250YWluZXIiOiJweGMiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNv"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJsYXN0LWFwcGxpZWQtc2VjcmV0IjoiZGQ3OWNkMDhkNTU0ODFlNjZiM2RmOWYyYjEwNjBmYzUwNjI3MjJlOWVlZjZmODNmNDBjNDc3MmY4YzlhOTQ1YSIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJkNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZSIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMw"...,    "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a",    "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a",    "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc",    "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc", +  PeriodSeconds: 0, -  PeriodSeconds: 10, +  PersistentVolumeClaimRetentionPolicy: nil, -  PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", +  Phase: "", -  Phase: "Pending", +  PodManagementPolicy: "", -  PodManagementPolicy: "OrderedReady",    Ports: nil,    Ports: []v1.ContainerPort{    PreemptionPolicy: nil,    ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}}, +  Protocol: "", -  Protocol: "TCP",    Quobyte: nil,    ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...}, +  ReadyReplicas: 0, -  ReadyReplicas: 2, -  ReadyReplicas: 3, +  Replicas: 0, +  Replicas: &2, -  Replicas: 2, -  Replicas: &2,    Replicas: &2, +  Replicas: &3, -  Replicas: 3, -  Replicas: &3,    Replicas: &3,    ResizePolicy: nil,    ResourceFieldRef: nil,    Resources: {},    Resources: {Limits: {s"cpu": {i: {...}, s: "1", Format: "DecimalSI"}, s"memory": {i: {...}, s: "2G", Format: "DecimalSI"}}, Requests: {s"cpu": {i: {...}, s: "100m", Format: "DecimalSI"}, s"memory": {i: {...}, Format: 
"DecimalSI"}}}, +  ResourceVersion: "", -  ResourceVersion: "1759375043668319008", -  ResourceVersion: "1759375230069967016", -  ResourceVersion: "1759375370518495008", -  ResourceVersion: "1759375409467423008", -  ResourceVersion: "1759375434683999008", -  ResourceVersion: "1759375582322575016", -  ResourceVersion: "1759375648436927008", -  ResourceVersion: "1759375687530559008", -  ResourceVersion: "1759375737598495008", -  ResourceVersion: "1759375861830559016", -  ResourceVersion: "1759375899901999008", -  ResourceVersion: "1759376149286943016", -  ResourceVersion: "1759376159023295016", -  ResourceVersion: "1759376159071775024", -  ResourceVersion: "1759376258728831024", +  RestartPolicy: "", -  RestartPolicy: "Always", -  RevisionHistoryLimit: &10, +  RevisionHistoryLimit: nil, +  SchedulerName: "", +  SchedulerName: "", -  SchedulerName: "default-scheduler", -  SchedulerName: "default-scheduler",    SecretName: "internal-some-name",    SecretName: "some-name-env-vars-haproxy",    SecretName: "some-name-mysql-init",    SecretName: "some-name-ssl",    SecretName: "some-name-ssl-internal",    SecretName: "some-name-vault",    Secret: &v1.SecretVolumeSource{ +  SecurityContext: nil,    SecurityContext: nil, -  SecurityContext: s"&PodSecurityContext{SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,SupplementalGroups:[],FSGroup:nil,RunAsGroup:nil,Sysctls:[]Sysctl{},WindowsOptions:nil,FSGroupChangePolicy:nil,SeccompProfile:nil,AppArmorProfile:nil,SupplementalGroupsPolicy:nil,SELinux"...,    Selector: &{MatchLabels: {"app.kubernetes.io/component": "haproxy", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    SelfLink: "",    ServiceAccountName: "default",    ServiceName: "some-name-haproxy",    ServiceName: "some-name-proxysql-unready",    ServiceName: "some-name-pxc",    SetHostnameAsFQDN: nil,    ShareProcessNamespace: nil, sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).processNextWorkItem sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).reconcileHandler sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller[...]).Start.func1.1    Spec: v1.PersistentVolumeClaimSpec{    Spec: v1.PodSpec{    Spec: v1.StatefulSetSpec{    StartupProbe: nil,    Status: {},    Status: v1.PersistentVolumeClaimStatus{    Status: v1.StatefulSetStatus{    StorageClassName: nil,    Subdomain: "",    Subdomain: "", -  Subresource: "status",    SuccessThreshold: 1,    Template: v1.PodTemplateSpec{    TerminationGracePeriodSeconds: &30,    TerminationGracePeriodSeconds: &600,    TerminationGracePeriodSeconds: nil, +  TerminationMessagePath: "", -  TerminationMessagePath: "/dev/termination-log", +  TerminationMessagePolicy: "", -  TerminationMessagePolicy: "File",    TimeoutSeconds: 5, -  Time: s"2025-10-02 03:16:47 +0000 UTC", -  Time: s"2025-10-02 03:17:23 +0000 UTC", -  Time: s"2025-10-02 03:20:30 +0000 UTC", -  Time: s"2025-10-02 03:22:32 +0000 UTC", -  Time: 
s"2025-10-02 03:22:50 +0000 UTC", -  Time: s"2025-10-02 03:22:59 +0000 UTC", -  Time: s"2025-10-02 03:23:29 +0000 UTC", -  Time: s"2025-10-02 03:23:53 +0000 UTC", -  Time: s"2025-10-02 03:23:54 +0000 UTC", -  Time: s"2025-10-02 03:24:00 +0000 UTC", -  Time: s"2025-10-02 03:26:22 +0000 UTC", -  Time: s"2025-10-02 03:26:32 +0000 UTC", -  Time: s"2025-10-02 03:27:28 +0000 UTC", -  Time: s"2025-10-02 03:27:47 +0000 UTC", -  Time: s"2025-10-02 03:28:07 +0000 UTC", -  Time: s"2025-10-02 03:28:40 +0000 UTC", -  Time: s"2025-10-02 03:28:57 +0000 UTC", -  Time: s"2025-10-02 03:31:01 +0000 UTC", -  Time: s"2025-10-02 03:31:15 +0000 UTC", -  Time: s"2025-10-02 03:31:39 +0000 UTC", -  Time: s"2025-10-02 03:33:22 +0000 UTC", -  Time: s"2025-10-02 03:35:49 +0000 UTC", -  Time: s"2025-10-02 03:35:58 +0000 UTC", -  Time: s"2025-10-02 03:35:59 +0000 UTC", -  Time: s"2025-10-02 03:37:38 +0000 UTC",    Tolerations: {{Key: "node.alpha.kubernetes.io/unreachable", Operator: "Exists", Effect: "NoExecute", TolerationSeconds: &6000}},    Tolerations: nil, -  TopologySpreadConstraints: nil, +  TopologySpreadConstraints: []v1.TopologySpreadConstraint{},    TypeMeta: {},    TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"}, +  UID: "", -  UID: "4a5d852e-7425-4456-a03f-d7da0f896e56", -  UID: "5fbd8b0b-d0c2-4d4f-810e-758eec1c9140", -  UID: "e7dc4da9-44f8-4b64-b449-9ba4a49778ef", +  UpdatedReplicas: 0, -  UpdatedReplicas: 2, -  UpdatedReplicas: 3, +  UpdateRevision: "", -  UpdateRevision: "some-name-haproxy-5dfb9d8c7", -  UpdateRevision: "some-name-proxysql-54877777b5", -  UpdateRevision: "some-name-proxysql-57899f475c", -  UpdateRevision: "some-name-proxysql-584c4c6b74", -  UpdateRevision: "some-name-proxysql-7747cd479f", -  UpdateRevision: "some-name-proxysql-7765964b77", -  UpdateRevision: "some-name-proxysql-867976c455", -  UpdateRevision: "some-name-pxc-5f8b99759c", -  UpdateRevision: "some-name-pxc-67c98bff64", -  UpdateRevision: "some-name-pxc-6bd7b6855d", -  UpdateRevision: "some-name-pxc-854f47c984",    UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}},   &v1.StatefulSet{    Value: "", +  Value: "caching_sha2_password",    ValueFrom: nil,    ValueFrom: &v1.EnvVarSource{ -  Value: "mysql_native_password",    VolumeAttributesClassName: nil,    VolumeClaimTemplates: nil,    VolumeClaimTemplates: []v1.PersistentVolumeClaim{    VolumeDevices: nil, -  VolumeMode: &"Filesystem", +  VolumeMode: nil,    VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...}, -  VolumeMounts: []v1.VolumeMount{{Name: "datadir", MountPath: "/var/lib/mysql"}},    VolumeName: "",    VolumeSource: v1.VolumeSource{    Volumes: []v1.Volume{    VsphereVolume: nil,    WorkingDir: "", + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-28439 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.RmC5xeQ9h8 ++ mktemp + local LAST_ERR=/tmp/tmp.4XklInwwxA + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RmC5xeQ9h8 perconaxtradbcluster.pxc.percona.com "some-name" 
deleted from users-28439 namespace + cat /tmp/tmp.4XklInwwxA + rm /tmp/tmp.RmC5xeQ9h8 /tmp/tmp.4XklInwwxA + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.vyfPUq0Nbt ++ mktemp + local LAST_ERR=/tmp/tmp.0D2onABCeQ + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.vyfPUq0Nbt No resources found + cat /tmp/tmp.0D2onABCeQ + rm /tmp/tmp.vyfPUq0Nbt /tmp/tmp.0D2onABCeQ + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.adtCM1ghSG ++ mktemp + local LAST_ERR=/tmp/tmp.R2VdIFFjrA + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.adtCM1ghSG No resources found + cat /tmp/tmp.R2VdIFFjrA + rm /tmp/tmp.adtCM1ghSG /tmp/tmp.R2VdIFFjrA + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.rj1sILYUQk ++ mktemp + local LAST_ERR=/tmp/tmp.vpZC12UcNj + local exit_status=0 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rj1sILYUQk validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.vpZC12UcNj + rm /tmp/tmp.rj1sILYUQk /tmp/tmp.vpZC12UcNj + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-28439 + rm -rf /tmp/tmp.zNf2oNZjFh + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.39VT6NYeOl + local LAST_OUT=/tmp/tmp.rmJjYVyH8Z ++ mktemp ++ mktemp + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_ERR=/tmp/tmp.cLoorkKkIz + local exit_status=0 + local LAST_ERR=/tmp/tmp.7diBqjUBFD + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete --grace-period=0 --force=true namespace users-28439 + for i in $(seq 0 2) + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator
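-----------------------------------------------------------------------------------
editor's note: decoding the last-config-hash blobs above
-----------------------------------------------------------------------------------
The long percona.com/last-config-hash values that dominate the object diffs are base64-encoded JSON snapshots of each StatefulSet spec (proxysql, haproxy, pxc) that the operator stores as an annotation and compares on every reconcile; the last-applied-secret hash embedded in them is what keeps changing as this users test rotates passwords. A minimal inspection sketch, assuming kubectl, base64, and jq are on the PATH and the test namespace still exists (not part of the test suite itself):

# Pull the annotation from the live StatefulSet and decode it; base64 -d may
# need '==' padding appended if the stored value's length is not a multiple of 4.
kubectl -n users-28439 get sts some-name-proxysql -o json \
  | jq -r '.metadata.annotations["percona.com/last-config-hash"]' \
  | base64 -d | jq '.template.metadata.annotations'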
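-----------------------------------------------------------------------------------
editor's note: reading the field-by-field diff
-----------------------------------------------------------------------------------
The single-character markers in the StatefulSet diff follow go-cmp conventions: lines marked "-" come from the object currently stored in the cluster, lines marked "+" from the object the operator has just generated, and unmarked lines are equal on both sides. That is why server-populated fields (ResourceVersion, UID, UpdateRevision, Phase) and API-server defaults (RestartPolicy: "Always", SchedulerName: "default-scheduler", TerminationMessagePath: "/dev/termination-log") appear as "-" entries opposite empty "+" values: the freshly built spec simply leaves them unset. A rough analogue outside the operator, hedged as a sketch (the /tmp/previous-sts.json snapshot file is hypothetical):

# Snapshot the live object once, stripped of server-managed fields...
kubectl -n users-28439 get sts some-name-pxc -o json \
  | jq 'del(.metadata.resourceVersion, .metadata.uid, .metadata.generation, .status)' \
  > /tmp/previous-sts.json
# ...then diff later states of the object against it the same way.
diff /tmp/previous-sts.json \
  <(kubectl -n users-28439 get sts some-name-pxc -o json \
      | jq 'del(.metadata.resourceVersion, .metadata.uid, .metadata.generation, .status)')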
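-----------------------------------------------------------------------------------
editor's note: the finalizer-stripping cleanup pattern
-----------------------------------------------------------------------------------
The cleanup that follows the diff uses a standard pattern for custom resources that carry finalizers: patch the finalizer list empty first, so that the subsequent delete --all cannot hang waiting on the operator (which may already be gone). Isolated and commented, under the assumption that the default kubectl column order (NAMESPACE, NAME, ...) holds:

# With 'sh -c', the first appended argument becomes $0 and the second $1, so
# each output line feeds the namespace into -n and the resource name after it;
# any trailing columns from -o wide are passed along but unused.
kubectl get pxc --all-namespaces -o wide \
  | grep -v NAMESPACE \
  | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
# With finalizers cleared, bulk deletion completes immediately.
kubectl delete pxc --all --all-namespaces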
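-----------------------------------------------------------------------------------
editor's note: the kubectl_bin retry wrapper
-----------------------------------------------------------------------------------
The repeated mktemp / seq 0 2 / set +e sequences in the trace all come from the suite's kubectl_bin helper. A minimal reconstruction consistent with the trace (hypothetical; the real helper lives in the suite's shared functions file and may differ in details such as backoff between attempts):

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    # Up to three attempts, matching the 'seq 0 2' loop in the trace.
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" > "$LAST_OUT" 2> "$LAST_ERR"
        exit_status=$?
        set -e
        # On success the '[ ... != 0 ]' test fails and the loop breaks,
        # exactly as the "'[' 0 '!=' 0 ']'" / "break" lines above show.
        [ "$exit_status" != 0 ] || break
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR"
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}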
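-----------------------------------------------------------------------------------
editor's note: parallel namespace teardown
-----------------------------------------------------------------------------------
The interleaved pairs of mktemp and LAST_OUT lines at the very end of the trace indicate that the two namespace deletions run as concurrent background jobs while the 'test passed' banner prints from the foreground. A sketch of that shape (an assumption; the harness presumably waits on the jobs before the run exits):

# Forced, zero-grace deletion of the test and operator namespaces in parallel.
kubectl delete --grace-period=0 --force=true namespace users-28439 &
kubectl delete --grace-period=0 --force=true namespace pxc-operator &
wait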