Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/logs/users-8-0.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + create_infra users-31680 + local ns=users-31680 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-6161 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' Error from server (InternalError): Internal error occurred: failed calling webhook "validationwebhook.pxc.percona.com": failed to call webhook: Post "https://percona-xtradb-cluster-operator.pxc-operator.svc:443/validate-percona-xtradbcluster?timeout=10s": tls: failed to verify certificate: x509: certificate signed by unknown authority (possibly because of "crypto/rsa: verification error" while trying to verify candidate authority certificate "Root CA") + : + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.obnPx2nMxj ++ mktemp + local LAST_ERR=/tmp/tmp.YHuowoSdHT + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.obnPx2nMxj perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.YHuowoSdHT + rm /tmp/tmp.obnPx2nMxj /tmp/tmp.YHuowoSdHT + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.76IZ1qpSfm ++ mktemp + local LAST_ERR=/tmp/tmp.m0MhR5nhLc + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.76IZ1qpSfm No resources found + cat /tmp/tmp.m0MhR5nhLc + rm /tmp/tmp.76IZ1qpSfm /tmp/tmp.m0MhR5nhLc + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.snYgzyP7tr ++ mktemp + local LAST_ERR=/tmp/tmp.O1D2Ivih3k + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.snYgzyP7tr No resources found + cat /tmp/tmp.O1D2Ivih3k + rm /tmp/tmp.snYgzyP7tr /tmp/tmp.O1D2Ivih3k + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl get MutatingWebhookConfiguration + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: 
resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get clusterrolebinding + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator + xargs kubectl delete ns ++ mktemp + awk '{print$1}' + local LAST_OUT=/tmp/tmp.4BsLnAs2WZ ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.YWbIcP1WhS + local exit_status=0 ++ seq 0 2 + local LAST_OUT=/tmp/tmp.x1riuZREvf + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator ++ mktemp + local LAST_ERR=/tmp/tmp.A4un6L1lxG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.x1riuZREvf + cat /tmp/tmp.A4un6L1lxG + rm /tmp/tmp.x1riuZREvf /tmp/tmp.A4un6L1lxG + return 0 namespace "users-6161" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.4BsLnAs2WZ namespace "pxc-operator" deleted + cat /tmp/tmp.YWbIcP1WhS + rm /tmp/tmp.4BsLnAs2WZ /tmp/tmp.YWbIcP1WhS + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.jHQnnIGxwp ++ mktemp + local LAST_ERR=/tmp/tmp.cMPTfJGQ5s + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jHQnnIGxwp namespace/pxc-operator created + cat /tmp/tmp.cMPTfJGQ5s + rm /tmp/tmp.jHQnnIGxwp /tmp/tmp.cMPTfJGQ5s + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.vCVLlJFYFN +++ mktemp ++ local LAST_ERR=/tmp/tmp.HyJb1YjWRs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vCVLlJFYFN ++ cat /tmp/tmp.HyJb1YjWRs ++ rm /tmp/tmp.vCVLlJFYFN /tmp/tmp.HyJb1YjWRs ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1726-75d74a6e-1-cluster8 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.OaSqcYewwa ++ mktemp + local LAST_ERR=/tmp/tmp.IrbQqfIzdL + local exit_status=0 ++ 
seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1726-75d74a6e-1-cluster8 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.OaSqcYewwa Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1726-75d74a6e-1-cluster8" modified. + cat /tmp/tmp.IrbQqfIzdL + rm /tmp/tmp.OaSqcYewwa /tmp/tmp.IrbQqfIzdL + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.h3S9dWwlZ7 ++ mktemp + local LAST_ERR=/tmp/tmp.IKrRUu1zg1 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.h3S9dWwlZ7 customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.IKrRUu1zg1 + rm /tmp/tmp.h3S9dWwlZ7 /tmp/tmp.IKrRUu1zg1 + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/deploy/cw-rbac.yaml + kubectl_bin apply -f - + sed -e 's^namespace: .*^namespace: pxc-operator^' ++ mktemp + local LAST_OUT=/tmp/tmp.yp3Ryjpoaz ++ mktemp + local LAST_ERR=/tmp/tmp.jGDKmDGft1 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.yp3Ryjpoaz clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.jGDKmDGft1 + rm /tmp/tmp.yp3Ryjpoaz /tmp/tmp.jGDKmDGft1 + return 0 + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1726-75d74a6e^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/deploy/cw-operator.yaml + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.xqWPcyYYhT ++ mktemp + local LAST_ERR=/tmp/tmp.ZUPeJxHt5V + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xqWPcyYYhT deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.ZUPeJxHt5V + rm /tmp/tmp.xqWPcyYYhT /tmp/tmp.ZUPeJxHt5V + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.PD1182u4J5 ++ mktemp + local LAST_ERR=/tmp/tmp.xPY5Cfxmec + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PD1182u4J5 pod/percona-xtradb-cluster-operator-58867c4d5c-9l5bj condition met + cat /tmp/tmp.xPY5Cfxmec + rm /tmp/tmp.PD1182u4J5 /tmp/tmp.xPY5Cfxmec + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.RBgW7eEjfW +++ mktemp ++ local LAST_ERR=/tmp/tmp.Xz9R443yG2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RBgW7eEjfW ++ cat /tmp/tmp.Xz9R443yG2 ++ rm /tmp/tmp.RBgW7eEjfW /tmp/tmp.Xz9R443yG2 ++ return 0 + wait_pod percona-xtradb-cluster-operator-58867c4d5c-9l5bj 480 pxc-operator + local pod=percona-xtradb-cluster-operator-58867c4d5c-9l5bj + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-58867c4d5c-9l5bj ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-58867c4d5c-9l5bj condition met percona-xtradb-cluster-operator-58867c4d5c-9l5bj.Ok + sleep 3 + create_namespace users-31680 + local namespace=users-31680 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: 
resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + '[' -n '' ']' + desc 'cleaned up old namespaces users-31680' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-31680 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-31680 ++ mktemp ++ mktemp + awk '{print$1}' + local LAST_OUT=/tmp/tmp.990TLPAFRs + local LAST_OUT=/tmp/tmp.tBkz1OFjOH ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.wMrFX1YwLC + local LAST_ERR=/tmp/tmp.C84osibnDs + local exit_status=0 + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-31680 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.990TLPAFRs + cat /tmp/tmp.C84osibnDs + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + rm /tmp/tmp.990TLPAFRs /tmp/tmp.C84osibnDs + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-31680 + return 0 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-31680 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.tBkz1OFjOH + cat /tmp/tmp.wMrFX1YwLC Error from server (NotFound): namespaces "users-31680" not found + rm /tmp/tmp.tBkz1OFjOH /tmp/tmp.wMrFX1YwLC + return 1 + : + wait_for_delete namespace/users-31680 + local res=namespace/users-31680 + echo -n 'namespace/users-31680 - ' namespace/users-31680 - + set +o xtrace Error from server (NotFound): namespaces "users-31680" not found + desc 'create namespace users-31680' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-31680 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-31680 ++ mktemp + local LAST_OUT=/tmp/tmp.lTBnW23u6H ++ mktemp + local LAST_ERR=/tmp/tmp.jZiYNn8iZu + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-31680 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lTBnW23u6H namespace/users-31680 created + cat /tmp/tmp.jZiYNn8iZu + rm /tmp/tmp.lTBnW23u6H /tmp/tmp.jZiYNn8iZu + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.1bO0AUrtyG +++ mktemp ++ local LAST_ERR=/tmp/tmp.L0ExksxNiq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1bO0AUrtyG ++ cat /tmp/tmp.L0ExksxNiq ++ rm /tmp/tmp.1bO0AUrtyG /tmp/tmp.L0ExksxNiq ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1726-75d74a6e-1-cluster8 --namespace=users-31680 ++ mktemp + local LAST_OUT=/tmp/tmp.xNaqpA4Ftq ++ mktemp + local LAST_ERR=/tmp/tmp.2jaivPpdip + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config 
set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1726-75d74a6e-1-cluster8 --namespace=users-31680 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xNaqpA4Ftq Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1726-75d74a6e-1-cluster8" modified. + cat /tmp/tmp.2jaivPpdip + rm /tmp/tmp.xNaqpA4Ftq /tmp/tmp.2jaivPpdip + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.xdoaBgduqE ++ mktemp + local LAST_ERR=/tmp/tmp.Iy34O2V5B5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xdoaBgduqE secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.Iy34O2V5B5 + rm /tmp/tmp.xdoaBgduqE /tmp/tmp.Iy34O2V5B5 + return 0 + desc 'create PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.kcwdsUYfiG ++ mktemp + local LAST_ERR=/tmp/tmp.PoJwHD3Qx3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.kcwdsUYfiG secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.PoJwHD3Qx3 + rm /tmp/tmp.kcwdsUYfiG /tmp/tmp.PoJwHD3Qx3 + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/conf/client.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/conf/client.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 
's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + local LAST_OUT=/tmp/tmp.7vAnFoJ1KN ++ mktemp + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + local LAST_ERR=/tmp/tmp.Q8CaC0AbA3 + local exit_status=0 + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1726-75d74a6e#' ++ seq 0 2 + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-31680~ + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7vAnFoJ1KN deployment.apps/pxc-client created + cat /tmp/tmp.Q8CaC0AbA3 + rm /tmp/tmp.7vAnFoJ1KN /tmp/tmp.Q8CaC0AbA3 + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/conf/some-name.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1726-75d74a6e#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.mz9CzYIl2m + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-31680~ + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' ++ mktemp + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + local LAST_ERR=/tmp/tmp.bxUdLcRDM5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.mz9CzYIl2m perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.bxUdLcRDM5 + rm /tmp/tmp.mz9CzYIl2m /tmp/tmp.bxUdLcRDM5 + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ 
mktemp +++ local LAST_OUT=/tmp/tmp.PJgZAULBxS ++++ mktemp +++ local LAST_ERR=/tmp/tmp.eoITkk7TAp +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.PJgZAULBxS +++ cat /tmp/tmp.eoITkk7TAp +++ rm /tmp/tmp.PJgZAULBxS /tmp/tmp.eoITkk7TAp +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.cuHx8booWg ++++ mktemp +++ local LAST_ERR=/tmp/tmp.p1QEsDsyfG +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.cuHx8booWg +++ cat /tmp/tmp.p1QEsDsyfG +++ rm /tmp/tmp.cuHx8booWg /tmp/tmp.p1QEsDsyfG +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-31680 ++ mktemp + local LAST_OUT=/tmp/tmp.JKcZLdM8lB ++ mktemp + local LAST_ERR=/tmp/tmp.2LUo4B9rQJ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-31680 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-31680 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-31680 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.JKcZLdM8lB + cat /tmp/tmp.2LUo4B9rQJ error: no matching resources found + rm /tmp/tmp.JKcZLdM8lB /tmp/tmp.2LUo4B9rQJ + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 
's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met some-name-pxc-0.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QyjtTZULt8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.eIyhaMdnMb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QyjtTZULt8 ++ cat /tmp/tmp.eIyhaMdnMb ++ rm /tmp/tmp.QyjtTZULt8 /tmp/tmp.eIyhaMdnMb ++ return 0 + client_pod=pxc-client-6644d8898f-zmpgg + wait_pod pxc-client-6644d8898f-zmpgg + local pod=pxc-client-6644d8898f-zmpgg + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zmpgg ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zmpgg condition met pxc-client-6644d8898f-zmpgg.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dv2VvVXRDJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.ewzG3ODeaf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dv2VvVXRDJ ++ cat /tmp/tmp.ewzG3ODeaf ++ rm /tmp/tmp.dv2VvVXRDJ /tmp/tmp.ewzG3ODeaf ++ return 0 + client_pod=pxc-client-6644d8898f-zmpgg + wait_pod pxc-client-6644d8898f-zmpgg + local pod=pxc-client-6644d8898f-zmpgg + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zmpgg ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zmpgg condition met pxc-client-6644d8898f-zmpgg.Ok + set +o xtrace + sleep 30 ++ seq 0 2 
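[editor's note] The repeated run_mysql/wait_pod blocks in the trace above all follow one pattern: resolve the pxc-client pod by label, wait for it to become Ready, then exec the SQL statement through the mysql client inside that pod. A minimal standalone sketch of that pattern follows; it is not the suite's actual helper — the pod selector, host, and credentials are copied from the trace, and the mysql -sN/-e flags are an assumption about how the helper passes the query.

#!/bin/bash
set -euo pipefail

# Resolve the client pod the same way the trace does (label name=pxc-client).
client_pod=$(kubectl get pods --selector=name=pxc-client \
  -o 'jsonpath={.items[].metadata.name}')

# Wait until the pod is Ready before using it.
kubectl wait --for=condition=Ready "pod/${client_pod}" --timeout=480s

# Run a statement through the mysql client inside the pod, as in the
# "write data" step above (host, user and password taken from the log).
kubectl exec -i "${client_pod}" -- bash -c \
  "mysql -sN -h some-name-proxysql -uroot -proot_password -P3306 \
   -e 'SELECT * FROM myApp.myApp;'"

[end of editor's note; the captured trace continues below]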
+ for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9QfwzGmRuJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.LIK9jNUJ4G ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9QfwzGmRuJ ++ cat /tmp/tmp.LIK9jNUJ4G ++ rm /tmp/tmp.9QfwzGmRuJ /tmp/tmp.LIK9jNUJ4G ++ return 0 + client_pod=pxc-client-6644d8898f-zmpgg + wait_pod pxc-client-6644d8898f-zmpgg + local pod=pxc-client-6644d8898f-zmpgg + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zmpgg ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zmpgg condition met pxc-client-6644d8898f-zmpgg.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.k6orBcxvpD/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-1.sql /tmp/tmp.k6orBcxvpD/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wpTnFMfsNe +++ mktemp ++ local LAST_ERR=/tmp/tmp.YvvgmpTMIL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wpTnFMfsNe ++ cat /tmp/tmp.YvvgmpTMIL ++ rm /tmp/tmp.wpTnFMfsNe /tmp/tmp.YvvgmpTMIL ++ return 0 + client_pod=pxc-client-6644d8898f-zmpgg + wait_pod pxc-client-6644d8898f-zmpgg + local pod=pxc-client-6644d8898f-zmpgg + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zmpgg ++ /usr/bin/sed -E 
's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zmpgg condition met pxc-client-6644d8898f-zmpgg.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.k6orBcxvpD/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-1.sql /tmp/tmp.k6orBcxvpD/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ayFtCGSKP1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.yGckOO3S5X ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ayFtCGSKP1 ++ cat /tmp/tmp.yGckOO3S5X ++ rm /tmp/tmp.ayFtCGSKP1 /tmp/tmp.yGckOO3S5X ++ return 0 + client_pod=pxc-client-6644d8898f-zmpgg + wait_pod pxc-client-6644d8898f-zmpgg + local pod=pxc-client-6644d8898f-zmpgg + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zmpgg ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zmpgg condition met pxc-client-6644d8898f-zmpgg.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.k6orBcxvpD/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-1.sql /tmp/tmp.k6orBcxvpD/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UkAOZQn71L +++ mktemp ++ local LAST_ERR=/tmp/tmp.J2KY9DOqyf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UkAOZQn71L ++ cat /tmp/tmp.J2KY9DOqyf Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.UkAOZQn71L /tmp/tmp.J2KY9DOqyf ++ return 0 + '[' '' ']' + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.mZKlLUWvTM ++ mktemp + local LAST_ERR=/tmp/tmp.uOEBr82yrh + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.mZKlLUWvTM secret/my-cluster-secrets patched + cat /tmp/tmp.uOEBr82yrh + rm /tmp/tmp.mZKlLUWvTM /tmp/tmp.uOEBr82yrh + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LsdWJZGx1a +++ mktemp ++ local LAST_ERR=/tmp/tmp.I0pGoc1f3B ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LsdWJZGx1a ++ cat /tmp/tmp.I0pGoc1f3B ++ rm /tmp/tmp.LsdWJZGx1a /tmp/tmp.I0pGoc1f3B ++ return 0 + client_pod=pxc-client-6644d8898f-zmpgg + wait_pod pxc-client-6644d8898f-zmpgg + local pod=pxc-client-6644d8898f-zmpgg + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zmpgg ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zmpgg condition met pxc-client-6644d8898f-zmpgg.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.k6orBcxvpD/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.k6orBcxvpD/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.6Xyq37VpwR ++ mktemp + local LAST_ERR=/tmp/tmp.mkEdV6ga0y + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.6Xyq37VpwR perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.mkEdV6ga0y + rm /tmp/tmp.6Xyq37VpwR /tmp/tmp.mkEdV6ga0y + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dgMuDx37Xa +++ mktemp ++ local LAST_ERR=/tmp/tmp.mk0QB7qfFx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dgMuDx37Xa ++ cat /tmp/tmp.mk0QB7qfFx ++ rm /tmp/tmp.dgMuDx37Xa /tmp/tmp.mk0QB7qfFx ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PmBvVU6pk3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.jCTfx9uz5S ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PmBvVU6pk3 ++ cat /tmp/tmp.jCTfx9uz5S ++ rm /tmp/tmp.PmBvVU6pk3 /tmp/tmp.jCTfx9uz5S ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.HyIlJzA9Vo ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.jy2l3Py2oZ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.HyIlJzA9Vo +++++ cat /tmp/tmp.jy2l3Py2oZ +++++ rm /tmp/tmp.HyIlJzA9Vo /tmp/tmp.jy2l3Py2oZ +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ZJGYiSIM6U ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.MiepqXxIRY +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ZJGYiSIM6U +++++ cat /tmp/tmp.MiepqXxIRY +++++ rm /tmp/tmp.ZJGYiSIM6U /tmp/tmp.MiepqXxIRY +++++ return 0 ++++ [[ true == 
\t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TTvlBkSoph +++ mktemp ++ local LAST_ERR=/tmp/tmp.XQ3EoSPTop ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TTvlBkSoph ++ cat /tmp/tmp.XQ3EoSPTop ++ rm /tmp/tmp.TTvlBkSoph /tmp/tmp.XQ3EoSPTop ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.bzYbP4YHST ++ mktemp + local LAST_ERR=/tmp/tmp.O0lAsHfokE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.bzYbP4YHST secret/my-cluster-secrets patched + cat /tmp/tmp.O0lAsHfokE + rm /tmp/tmp.bzYbP4YHST /tmp/tmp.O0lAsHfokE + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ip0bagXnVc +++ mktemp ++ local LAST_ERR=/tmp/tmp.qVdFpCohzc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ip0bagXnVc ++ cat /tmp/tmp.qVdFpCohzc ++ rm /tmp/tmp.Ip0bagXnVc /tmp/tmp.qVdFpCohzc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w1Vz3jNQji +++ mktemp ++ local LAST_ERR=/tmp/tmp.SJCFehN1Oi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.w1Vz3jNQji ++ cat /tmp/tmp.SJCFehN1Oi ++ rm /tmp/tmp.w1Vz3jNQji /tmp/tmp.SJCFehN1Oi ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.go3O4BRVfW +++ mktemp ++ local LAST_ERR=/tmp/tmp.kt7UV4qFZ1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.go3O4BRVfW ++ cat /tmp/tmp.kt7UV4qFZ1 ++ rm /tmp/tmp.go3O4BRVfW /tmp/tmp.kt7UV4qFZ1 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local 
LAST_OUT=/tmp/tmp.0yILxPif6e ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.xyzts0vUVh +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.0yILxPif6e +++++ cat /tmp/tmp.xyzts0vUVh +++++ rm /tmp/tmp.0yILxPif6e /tmp/tmp.xyzts0vUVh +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.4Y4SCFxWVv ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.TcYajzkIy2 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.4Y4SCFxWVv +++++ cat /tmp/tmp.TcYajzkIy2 +++++ rm /tmp/tmp.4Y4SCFxWVv /tmp/tmp.TcYajzkIy2 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OwRU1ZXJkr +++ mktemp ++ local LAST_ERR=/tmp/tmp.Xw9yjfkRA8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OwRU1ZXJkr ++ cat /tmp/tmp.Xw9yjfkRA8 ++ rm /tmp/tmp.OwRU1ZXJkr /tmp/tmp.Xw9yjfkRA8 ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.k6orBcxvpD/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-2.sql /tmp/tmp.k6orBcxvpD/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.k6orBcxvpD/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-2.sql /tmp/tmp.k6orBcxvpD/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.k6orBcxvpD/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-2.sql /tmp/tmp.k6orBcxvpD/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.fIeSQjixBV ++ mktemp + local LAST_ERR=/tmp/tmp.1wPi1Rh59H + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.fIeSQjixBV perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.1wPi1Rh59H + rm /tmp/tmp.fIeSQjixBV /tmp/tmp.1wPi1Rh59H + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.WSeEBHwOFY ++ mktemp + local LAST_ERR=/tmp/tmp.fsErhMDoND + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.WSeEBHwOFY secret/my-cluster-secrets patched + cat /tmp/tmp.fsErhMDoND + rm /tmp/tmp.WSeEBHwOFY /tmp/tmp.fsErhMDoND + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6tsLwBJeu4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.IjjbJcLPj9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6tsLwBJeu4 ++ cat /tmp/tmp.IjjbJcLPj9 ++ rm /tmp/tmp.6tsLwBJeu4 /tmp/tmp.IjjbJcLPj9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xOgg78nzcN +++ mktemp ++ local LAST_ERR=/tmp/tmp.0TKOdvIDc7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xOgg78nzcN ++ cat /tmp/tmp.0TKOdvIDc7 ++ rm /tmp/tmp.xOgg78nzcN /tmp/tmp.0TKOdvIDc7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IesafpgHqH +++ mktemp ++ local LAST_ERR=/tmp/tmp.7DY9ZJlaZl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 
'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IesafpgHqH ++ cat /tmp/tmp.7DY9ZJlaZl ++ rm /tmp/tmp.IesafpgHqH /tmp/tmp.7DY9ZJlaZl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DSTypATP0h +++ mktemp ++ local LAST_ERR=/tmp/tmp.AsjiwWZXjE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DSTypATP0h ++ cat /tmp/tmp.AsjiwWZXjE ++ rm /tmp/tmp.DSTypATP0h /tmp/tmp.AsjiwWZXjE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ElAoYL0Kau +++ mktemp ++ local LAST_ERR=/tmp/tmp.kjfyrBMmse ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ElAoYL0Kau ++ cat /tmp/tmp.kjfyrBMmse ++ rm /tmp/tmp.ElAoYL0Kau /tmp/tmp.kjfyrBMmse ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0moyj7VVMa +++ mktemp ++ local LAST_ERR=/tmp/tmp.pbySGhDPDV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0moyj7VVMa ++ cat /tmp/tmp.pbySGhDPDV ++ rm /tmp/tmp.0moyj7VVMa /tmp/tmp.pbySGhDPDV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gPU6IUKy6B +++ mktemp ++ local LAST_ERR=/tmp/tmp.NsGp3KOuLG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gPU6IUKy6B ++ cat /tmp/tmp.NsGp3KOuLG ++ rm /tmp/tmp.gPU6IUKy6B /tmp/tmp.NsGp3KOuLG ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kVLFrLokFE +++ mktemp ++ local LAST_ERR=/tmp/tmp.WUn2wad8rL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kVLFrLokFE ++ cat /tmp/tmp.WUn2wad8rL ++ rm /tmp/tmp.kVLFrLokFE /tmp/tmp.WUn2wad8rL ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TT5mw3Oo16 +++ mktemp ++ local LAST_ERR=/tmp/tmp.zDKNfo5jZ6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e 
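[editor's note] The long run of identical jsonpath queries around this point is the wait_cluster_consistency poll: read .status.state of the custom resource, and if it is not "ready", print a message, sleep 20 seconds, and retry up to 36 times before giving up. A condensed sketch of that loop under the same assumptions (cluster name, sleep, and retry limit taken from the trace; this is an illustration, not the helper's exact code):

#!/bin/bash
set -euo pipefail

cluster=some-name
i=0
max=36

# Poll the custom resource until the operator reports it ready,
# mirroring the "waiting for cluster readyness" messages in the log.
until [[ "$(kubectl get pxc "${cluster}" -o 'jsonpath={.status.state}')" == "ready" ]]; do
  echo 'waiting for cluster readyness'
  sleep 20
  i=$((i + 1))
  if [[ ${i} -ge ${max} ]]; then
    echo "cluster ${cluster} did not become ready in time"
    exit 1
  fi
done

# Once ready, the test then checks the reported pod counts, as seen below.
kubectl get pxc "${cluster}" -o 'jsonpath={.status.pxc.ready}'
kubectl get pxc "${cluster}" -o 'jsonpath={.status.proxysql.ready}'

[end of editor's note; the captured trace continues below]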
++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TT5mw3Oo16 ++ cat /tmp/tmp.zDKNfo5jZ6 ++ rm /tmp/tmp.TT5mw3Oo16 /tmp/tmp.zDKNfo5jZ6 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.RFJgaqsQGX ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.giS8poVo0Z +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.RFJgaqsQGX +++++ cat /tmp/tmp.giS8poVo0Z +++++ rm /tmp/tmp.RFJgaqsQGX /tmp/tmp.giS8poVo0Z +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.3tbPqRBRn1 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.54qXJChRZl +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.3tbPqRBRn1 +++++ cat /tmp/tmp.54qXJChRZl +++++ rm /tmp/tmp.3tbPqRBRn1 /tmp/tmp.54qXJChRZl +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.143d8vOOb6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.nIVsjMrS2V ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.143d8vOOb6 ++ cat /tmp/tmp.nIVsjMrS2V ++ rm /tmp/tmp.143d8vOOb6 /tmp/tmp.nIVsjMrS2V ++ return 0 + [[ 2 == \2 ]] + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-3-80.sql ']' + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' 
-s /tmp/tmp.k6orBcxvpD/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-3.sql /tmp/tmp.k6orBcxvpD/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.pOULrkF5o1 ++ mktemp + local LAST_ERR=/tmp/tmp.4PZ20KKCsN + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.pOULrkF5o1 secret/my-cluster-secrets patched + cat /tmp/tmp.4PZ20KKCsN + rm /tmp/tmp.pOULrkF5o1 /tmp/tmp.4PZ20KKCsN + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.4x4VOYOeP8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.7ivb2178sy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4x4VOYOeP8 ++ cat /tmp/tmp.7ivb2178sy ++ rm /tmp/tmp.4x4VOYOeP8 /tmp/tmp.7ivb2178sy ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + retry=0 + is_password_updated monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep additional_password + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.j0h6BuvfWD +++ mktemp ++ local LAST_ERR=/tmp/tmp.39DIE8Nvpz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.j0h6BuvfWD ++ cat /tmp/tmp.39DIE8Nvpz ++ rm /tmp/tmp.j0h6BuvfWD /tmp/tmp.39DIE8Nvpz ++ return 0 + client_pod=pxc-client-6644d8898f-zmpgg + wait_pod pxc-client-6644d8898f-zmpgg + local pod=pxc-client-6644d8898f-zmpgg + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zmpgg ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zmpgg condition met pxc-client-6644d8898f-zmpgg.Ok + set +o xtrace {"additional_password": "*C1F414D9BAF378B656A849B31F9F8AF3125F558B"} + retry=0 + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc 
-uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' + grep NULL +++ mktemp ++ local LAST_OUT=/tmp/tmp.W0dnmPhwtW +++ mktemp ++ local LAST_ERR=/tmp/tmp.Woy3xlyKYm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.W0dnmPhwtW ++ cat /tmp/tmp.Woy3xlyKYm ++ rm /tmp/tmp.W0dnmPhwtW /tmp/tmp.Woy3xlyKYm ++ return 0 + client_pod=pxc-client-6644d8898f-zmpgg + wait_pod pxc-client-6644d8898f-zmpgg + local pod=pxc-client-6644d8898f-zmpgg + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zmpgg ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zmpgg condition met pxc-client-6644d8898f-zmpgg.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 1 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9FMDTgRmCU +++ mktemp ++ local LAST_ERR=/tmp/tmp.vKZq1clIVh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9FMDTgRmCU ++ cat /tmp/tmp.vKZq1clIVh ++ rm /tmp/tmp.9FMDTgRmCU /tmp/tmp.vKZq1clIVh ++ return 0 + client_pod=pxc-client-6644d8898f-zmpgg + wait_pod pxc-client-6644d8898f-zmpgg + local pod=pxc-client-6644d8898f-zmpgg + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zmpgg ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zmpgg condition met pxc-client-6644d8898f-zmpgg.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 2 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AWWpsRJjqh +++ mktemp ++ local LAST_ERR=/tmp/tmp.HySNCGKYNd ++ local 
exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AWWpsRJjqh ++ cat /tmp/tmp.HySNCGKYNd ++ rm /tmp/tmp.AWWpsRJjqh /tmp/tmp.HySNCGKYNd ++ return 0 + client_pod=pxc-client-6644d8898f-zmpgg + wait_pod pxc-client-6644d8898f-zmpgg + local pod=pxc-client-6644d8898f-zmpgg + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zmpgg ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zmpgg condition met pxc-client-6644d8898f-zmpgg.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 3 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uhrw2zQ4aF +++ mktemp ++ local LAST_ERR=/tmp/tmp.h22eqGIBdW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uhrw2zQ4aF ++ cat /tmp/tmp.h22eqGIBdW ++ rm /tmp/tmp.uhrw2zQ4aF /tmp/tmp.h22eqGIBdW ++ return 0 + client_pod=pxc-client-6644d8898f-zmpgg + wait_pod pxc-client-6644d8898f-zmpgg + local pod=pxc-client-6644d8898f-zmpgg + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zmpgg ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zmpgg condition met pxc-client-6644d8898f-zmpgg.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 4 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GxJY8tBOdl +++ mktemp ++ local LAST_ERR=/tmp/tmp.YIS86uVTyV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GxJY8tBOdl ++ cat /tmp/tmp.YIS86uVTyV ++ rm /tmp/tmp.GxJY8tBOdl /tmp/tmp.YIS86uVTyV ++ return 0 + client_pod=pxc-client-6644d8898f-zmpgg + wait_pod pxc-client-6644d8898f-zmpgg + local pod=pxc-client-6644d8898f-zmpgg + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zmpgg ++ /usr/bin/sed -E 
's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zmpgg condition met pxc-client-6644d8898f-zmpgg.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 5 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fRIZluveTU +++ mktemp ++ local LAST_ERR=/tmp/tmp.iTiz3bAbT9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fRIZluveTU ++ cat /tmp/tmp.iTiz3bAbT9 ++ rm /tmp/tmp.fRIZluveTU /tmp/tmp.iTiz3bAbT9 ++ return 0 + client_pod=pxc-client-6644d8898f-zmpgg + wait_pod pxc-client-6644d8898f-zmpgg + local pod=pxc-client-6644d8898f-zmpgg + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zmpgg ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zmpgg condition met pxc-client-6644d8898f-zmpgg.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 6 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ti6JFh97a5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.DhZ4aF3SnH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ti6JFh97a5 ++ cat /tmp/tmp.DhZ4aF3SnH ++ rm /tmp/tmp.ti6JFh97a5 /tmp/tmp.DhZ4aF3SnH ++ return 0 + client_pod=pxc-client-6644d8898f-zmpgg + wait_pod pxc-client-6644d8898f-zmpgg + local pod=pxc-client-6644d8898f-zmpgg + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zmpgg ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zmpgg condition met pxc-client-6644d8898f-zmpgg.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 7 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE 
user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Gh51jU5SZK +++ mktemp ++ local LAST_ERR=/tmp/tmp.FMyCcGwhJp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Gh51jU5SZK ++ cat /tmp/tmp.FMyCcGwhJp ++ rm /tmp/tmp.Gh51jU5SZK /tmp/tmp.FMyCcGwhJp ++ return 0 + client_pod=pxc-client-6644d8898f-zmpgg + wait_pod pxc-client-6644d8898f-zmpgg + local pod=pxc-client-6644d8898f-zmpgg + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zmpgg ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zmpgg condition met pxc-client-6644d8898f-zmpgg.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 8 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WnlkTqohqo +++ mktemp ++ local LAST_ERR=/tmp/tmp.4iMu5O5DRP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WnlkTqohqo ++ cat /tmp/tmp.4iMu5O5DRP ++ rm /tmp/tmp.WnlkTqohqo /tmp/tmp.4iMu5O5DRP ++ return 0 + client_pod=pxc-client-6644d8898f-zmpgg + wait_pod pxc-client-6644d8898f-zmpgg + local pod=pxc-client-6644d8898f-zmpgg + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zmpgg ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zmpgg condition met pxc-client-6644d8898f-zmpgg.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 9 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.O8ZgYjGoP1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.KnGMMRv67h ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods 
--selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.O8ZgYjGoP1 ++ cat /tmp/tmp.KnGMMRv67h ++ rm /tmp/tmp.O8ZgYjGoP1 /tmp/tmp.KnGMMRv67h ++ return 0 + client_pod=pxc-client-6644d8898f-zmpgg + wait_pod pxc-client-6644d8898f-zmpgg + local pod=pxc-client-6644d8898f-zmpgg + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zmpgg ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zmpgg condition met pxc-client-6644d8898f-zmpgg.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 10 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ot7scbWxuS +++ mktemp ++ local LAST_ERR=/tmp/tmp.zxYk98Q3V6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ot7scbWxuS ++ cat /tmp/tmp.zxYk98Q3V6 ++ rm /tmp/tmp.ot7scbWxuS /tmp/tmp.zxYk98Q3V6 ++ return 0 + client_pod=pxc-client-6644d8898f-zmpgg + wait_pod pxc-client-6644d8898f-zmpgg + local pod=pxc-client-6644d8898f-zmpgg + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zmpgg ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zmpgg condition met pxc-client-6644d8898f-zmpgg.Ok + set +o xtrace NULL + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qpDkd2x6CN +++ mktemp ++ local LAST_ERR=/tmp/tmp.Bb40OMV9Js ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qpDkd2x6CN ++ cat /tmp/tmp.Bb40OMV9Js ++ rm /tmp/tmp.qpDkd2x6CN /tmp/tmp.Bb40OMV9Js ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PRtD4i2qP8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.WHUnm1dxQX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PRtD4i2qP8 ++ cat /tmp/tmp.WHUnm1dxQX ++ rm /tmp/tmp.PRtD4i2qP8 /tmp/tmp.WHUnm1dxQX ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name 
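The propagation loop that just completed watches the monitor account's User_attributes column: right after the rotation it still carries an "additional_password" entry (MySQL 8.0's record of a retained secondary password), and the loop only stops once that entry is replaced by NULL. A minimal sketch of the two checks, using the names seen in the trace (run_mysql is the test helper that runs SQL through the pxc-client pod):

# sketch: the checks behind the "waiting for password propagation" loop above
is_password_updated() {        # new password applied, old one still retained
  local username=$1 uri=$2
  run_mysql "SELECT User_attributes FROM mysql.user WHERE user='${username}'" "${uri}" \
    | grep additional_password
}

is_old_password_discarded() {  # retained password dropped -> column shows NULL
  local username=$1 uri=$2
  run_mysql "SELECT User_attributes FROM mysql.user WHERE user='${username}'" "${uri}" \
    | grep NULL
}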
+++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Ff7mFefoLm ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.899BBSnCiK +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Ff7mFefoLm +++++ cat /tmp/tmp.899BBSnCiK +++++ rm /tmp/tmp.Ff7mFefoLm /tmp/tmp.899BBSnCiK +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.tydMSNTAbi ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Yb5muWcEEn +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.tydMSNTAbi +++++ cat /tmp/tmp.Yb5muWcEEn +++++ rm /tmp/tmp.tydMSNTAbi /tmp/tmp.Yb5muWcEEn +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6Nz34hstWY +++ mktemp ++ local LAST_ERR=/tmp/tmp.CFRFhpcf4d ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6Nz34hstWY ++ cat /tmp/tmp.CFRFhpcf4d ++ rm /tmp/tmp.6Nz34hstWY /tmp/tmp.CFRFhpcf4d ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yqx2Ffk4Jh +++ mktemp ++ local LAST_ERR=/tmp/tmp.oxuQ9PLB9p ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yqx2Ffk4Jh ++ cat /tmp/tmp.oxuQ9PLB9p ++ rm /tmp/tmp.yqx2Ffk4Jh /tmp/tmp.oxuQ9PLB9p ++ return 0 + client_pod=pxc-client-6644d8898f-zmpgg + wait_pod pxc-client-6644d8898f-zmpgg + local pod=pxc-client-6644d8898f-zmpgg + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zmpgg ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zmpgg condition met pxc-client-6644d8898f-zmpgg.Ok + set 
+o xtrace + '[' '!' -s /tmp/tmp.k6orBcxvpD/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.k6orBcxvpD/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.OH3XqcSgyN ++ mktemp + local LAST_ERR=/tmp/tmp.TwJwNCyR7Y + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.OH3XqcSgyN secret/my-cluster-secrets patched + cat /tmp/tmp.TwJwNCyR7Y + rm /tmp/tmp.OH3XqcSgyN /tmp/tmp.TwJwNCyR7Y + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Q2fWcvl2JI +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZSVm3pQ9m1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Q2fWcvl2JI ++ cat /tmp/tmp.ZSVm3pQ9m1 ++ rm /tmp/tmp.Q2fWcvl2JI /tmp/tmp.ZSVm3pQ9m1 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.W1sJ976ADt +++ mktemp ++ local LAST_ERR=/tmp/tmp.M7AueVY17g ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.W1sJ976ADt ++ cat /tmp/tmp.M7AueVY17g ++ rm /tmp/tmp.W1sJ976ADt /tmp/tmp.M7AueVY17g ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.TXpkWexfD3 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.PaowOgWAxN +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.TXpkWexfD3 +++++ cat /tmp/tmp.PaowOgWAxN +++++ rm /tmp/tmp.TXpkWexfD3 /tmp/tmp.PaowOgWAxN +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.M2aQGAACVR ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.UtH5BpLcRo +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e 
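Every password rotation in this test (xtrabackup, monitor, and now operator) is driven by the same single-key secret patch visible in the trace; the value is base64-encoded first, which is why the log shows dGVzdC1wYXNzd29yZA== rather than the plaintext. A minimal sketch with the trace's helper name:

# sketch: patch one key of the cluster users secret with a base64-encoded password
patch_secret() {
  local secret=$1 key=$2 value=$3
  kubectl patch secret "${secret}" -p="{\"data\":{\"${key}\": \"${value}\"}}"
}

# example: the operator rotation this wait follows
# patch_secret my-cluster-secrets operator "$(echo -n 'test-password' | base64)"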
+++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.M2aQGAACVR +++++ cat /tmp/tmp.UtH5BpLcRo +++++ rm /tmp/tmp.M2aQGAACVR /tmp/tmp.UtH5BpLcRo +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zhpN0D3DMc +++ mktemp ++ local LAST_ERR=/tmp/tmp.mKGWfB14j5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zhpN0D3DMc ++ cat /tmp/tmp.mKGWfB14j5 ++ rm /tmp/tmp.zhpN0D3DMc /tmp/tmp.mKGWfB14j5 ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MAZMx74hKG +++ mktemp ++ local LAST_ERR=/tmp/tmp.mgQowgKp8G ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MAZMx74hKG ++ cat /tmp/tmp.mgQowgKp8G ++ rm /tmp/tmp.MAZMx74hKG /tmp/tmp.mgQowgKp8G ++ return 0 + client_pod=pxc-client-6644d8898f-zmpgg + wait_pod pxc-client-6644d8898f-zmpgg + local pod=pxc-client-6644d8898f-zmpgg + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zmpgg ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zmpgg condition met pxc-client-6644d8898f-zmpgg.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.k6orBcxvpD/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.k6orBcxvpD/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.tThMABiiIL ++ mktemp + local LAST_ERR=/tmp/tmp.Y9sg5SdZfr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.tThMABiiIL perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.Y9sg5SdZfr + rm /tmp/tmp.tThMABiiIL /tmp/tmp.Y9sg5SdZfr + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.E5ECB3LPur +++ mktemp ++ local LAST_ERR=/tmp/tmp.c0H40EgcYm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.E5ECB3LPur ++ cat /tmp/tmp.c0H40EgcYm ++ rm /tmp/tmp.E5ECB3LPur /tmp/tmp.c0H40EgcYm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dLWJE9FMxM +++ mktemp ++ local LAST_ERR=/tmp/tmp.tnOpV5Lcc5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dLWJE9FMxM ++ cat /tmp/tmp.tnOpV5Lcc5 ++ rm /tmp/tmp.dLWJE9FMxM /tmp/tmp.tnOpV5Lcc5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pOCW8cxxs5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.bExfIrGZaa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pOCW8cxxs5 ++ cat /tmp/tmp.bExfIrGZaa ++ rm /tmp/tmp.pOCW8cxxs5 /tmp/tmp.bExfIrGZaa ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lLGIawOo3k +++ mktemp ++ local LAST_ERR=/tmp/tmp.swR4zc0fmF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ 
break ++ cat /tmp/tmp.lLGIawOo3k ++ cat /tmp/tmp.swR4zc0fmF ++ rm /tmp/tmp.lLGIawOo3k /tmp/tmp.swR4zc0fmF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vYud9U4fRi +++ mktemp ++ local LAST_ERR=/tmp/tmp.mbtZDT6g0q ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vYud9U4fRi ++ cat /tmp/tmp.mbtZDT6g0q ++ rm /tmp/tmp.vYud9U4fRi /tmp/tmp.mbtZDT6g0q ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gPmRW5jzDS +++ mktemp ++ local LAST_ERR=/tmp/tmp.ziZKpLPSrx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gPmRW5jzDS ++ cat /tmp/tmp.ziZKpLPSrx ++ rm /tmp/tmp.gPmRW5jzDS /tmp/tmp.ziZKpLPSrx ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PExTUSiWbs +++ mktemp ++ local LAST_ERR=/tmp/tmp.wvj3boQi6T ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PExTUSiWbs ++ cat /tmp/tmp.wvj3boQi6T ++ rm /tmp/tmp.PExTUSiWbs /tmp/tmp.wvj3boQi6T ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MiujJdaBTu +++ mktemp ++ local LAST_ERR=/tmp/tmp.oVjfGcwOmM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MiujJdaBTu ++ cat /tmp/tmp.oVjfGcwOmM ++ rm /tmp/tmp.MiujJdaBTu /tmp/tmp.oVjfGcwOmM ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.8stDrGwVFK ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Lm1ziC6Gyx +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.8stDrGwVFK +++++ cat /tmp/tmp.Lm1ziC6Gyx +++++ rm /tmp/tmp.8stDrGwVFK /tmp/tmp.Lm1ziC6Gyx +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.qcuVrHIx1g ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.AxRzWMlLeo +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.qcuVrHIx1g +++++ 
cat /tmp/tmp.AxRzWMlLeo +++++ rm /tmp/tmp.qcuVrHIx1g /tmp/tmp.AxRzWMlLeo +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.P2a5zUUuZb +++ mktemp ++ local LAST_ERR=/tmp/tmp.kPjS3nXGyy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.P2a5zUUuZb ++ cat /tmp/tmp.kPjS3nXGyy ++ rm /tmp/tmp.P2a5zUUuZb /tmp/tmp.kPjS3nXGyy ++ return 0 + [[ 2 == \2 ]] + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.mWKl0RXYz5 ++ mktemp + local LAST_ERR=/tmp/tmp.fw5Ahu4oo3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.mWKl0RXYz5 secret/my-cluster-secrets-2 patched + cat /tmp/tmp.fw5Ahu4oo3 + rm /tmp/tmp.mWKl0RXYz5 /tmp/tmp.fw5Ahu4oo3 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eglHA8CTIm +++ mktemp ++ local LAST_ERR=/tmp/tmp.8bZqSDha2d ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eglHA8CTIm ++ cat /tmp/tmp.8bZqSDha2d ++ rm /tmp/tmp.eglHA8CTIm /tmp/tmp.8bZqSDha2d ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UV1rBDDk7Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.bEtv6E7MdW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UV1rBDDk7Y ++ cat /tmp/tmp.bEtv6E7MdW ++ rm /tmp/tmp.UV1rBDDk7Y /tmp/tmp.bEtv6E7MdW ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.WHjad26jDV ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.djOPGMg6Ga +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc 
some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.WHjad26jDV +++++ cat /tmp/tmp.djOPGMg6Ga +++++ rm /tmp/tmp.WHjad26jDV /tmp/tmp.djOPGMg6Ga +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.aYpWDnQTGU ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.NpVbpKbpHw +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.aYpWDnQTGU +++++ cat /tmp/tmp.NpVbpKbpHw +++++ rm /tmp/tmp.aYpWDnQTGU /tmp/tmp.NpVbpKbpHw +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sBjqnZCm9W +++ mktemp ++ local LAST_ERR=/tmp/tmp.xpgFLcLPTn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sBjqnZCm9W ++ cat /tmp/tmp.xpgFLcLPTn ++ rm /tmp/tmp.sBjqnZCm9W /tmp/tmp.xpgFLcLPTn ++ return 0 + [[ 2 == \2 ]] + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OiUVLSO11c +++ mktemp ++ local LAST_ERR=/tmp/tmp.KWvHsF8P9N ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OiUVLSO11c ++ cat /tmp/tmp.KWvHsF8P9N ++ rm /tmp/tmp.OiUVLSO11c /tmp/tmp.KWvHsF8P9N ++ return 0 + client_pod=pxc-client-6644d8898f-zmpgg + wait_pod pxc-client-6644d8898f-zmpgg + local pod=pxc-client-6644d8898f-zmpgg + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zmpgg ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zmpgg condition met pxc-client-6644d8898f-zmpgg.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.k6orBcxvpD/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.k6orBcxvpD/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.KRyrDNlP13 +++ mktemp ++ local LAST_ERR=/tmp/tmp.NHESCSMqB5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KRyrDNlP13 ++ cat /tmp/tmp.NHESCSMqB5 ++ rm /tmp/tmp.KRyrDNlP13 /tmp/tmp.NHESCSMqB5 ++ return 0 + newpass='L4{%6!-Z,6%qo&5s' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''L4{%6!-Z,6%qo&5s'\'';' '-h some-name-pxc -uroot -p'\''L4{%6!-Z,6%qo&5s'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''L4{%6!-Z,6%qo&5s'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''L4{%6!-Z,6%qo&5s'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uDvXuhYyZq +++ mktemp ++ local LAST_ERR=/tmp/tmp.xKzOuyqdL3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uDvXuhYyZq ++ cat /tmp/tmp.xKzOuyqdL3 ++ rm /tmp/tmp.uDvXuhYyZq /tmp/tmp.xKzOuyqdL3 ++ return 0 + client_pod=pxc-client-6644d8898f-zmpgg + wait_pod pxc-client-6644d8898f-zmpgg + local pod=pxc-client-6644d8898f-zmpgg + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zmpgg ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zmpgg condition met pxc-client-6644d8898f-zmpgg.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''L4{%6!-Z,6%qo&5s'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''L4{%6!-Z,6%qo&5s'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''L4{%6!-Z,6%qo&5s'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''L4{%6!-Z,6%qo&5s'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VzQzgpv6Xr +++ mktemp ++ local LAST_ERR=/tmp/tmp.jOdUxkNMLi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ 
cat /tmp/tmp.VzQzgpv6Xr ++ cat /tmp/tmp.jOdUxkNMLi ++ rm /tmp/tmp.VzQzgpv6Xr /tmp/tmp.jOdUxkNMLi ++ return 0 + client_pod=pxc-client-6644d8898f-zmpgg + wait_pod pxc-client-6644d8898f-zmpgg + local pod=pxc-client-6644d8898f-zmpgg + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zmpgg ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zmpgg condition met pxc-client-6644d8898f-zmpgg.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.k6orBcxvpD/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.k6orBcxvpD/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.Yt7lDMv43V +++ mktemp ++ local LAST_ERR=/tmp/tmp.qAKGQqT2c7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Yt7lDMv43V ++ cat /tmp/tmp.qAKGQqT2c7 ++ rm /tmp/tmp.Yt7lDMv43V /tmp/tmp.qAKGQqT2c7 ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.Vy6UfsfTny ++ mktemp + local LAST_ERR=/tmp/tmp.6aukJjXR5J + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Vy6UfsfTny secret/my-cluster-secrets-2 configured + cat /tmp/tmp.6aukJjXR5J Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
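At this point the test has noted the current operator password from the operator-managed internal secret and re-applied a secrets.yml that carries no operator key; the compare that follows confirms the operator user still authenticates with the preserved password. Secrets are read back throughout the trace with a small template helper; a minimal sketch using the trace's name:

# sketch: read one key back out of a secret, decoded to plaintext
getSecretData() {
  local secretName=$1 dataKey=$2
  kubectl get "secrets/${secretName}" --template="{{.data.${dataKey}}}" | base64 --decode
}

# example from this phase:
# getSecretData internal-some-name operator    # -> test-password2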
+ rm /tmp/tmp.Vy6UfsfTny /tmp/tmp.6aukJjXR5J + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BBo3t3xxKX +++ mktemp ++ local LAST_ERR=/tmp/tmp.U02rZ7foRw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BBo3t3xxKX ++ cat /tmp/tmp.U02rZ7foRw ++ rm /tmp/tmp.BBo3t3xxKX /tmp/tmp.U02rZ7foRw ++ return 0 + client_pod=pxc-client-6644d8898f-zmpgg + wait_pod pxc-client-6644d8898f-zmpgg + local pod=pxc-client-6644d8898f-zmpgg + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zmpgg ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zmpgg condition met pxc-client-6644d8898f-zmpgg.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.k6orBcxvpD/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.k6orBcxvpD/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-31680~ + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1726-75d74a6e#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ mktemp + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_OUT=/tmp/tmp.SPr33uWoAy ++ mktemp + local LAST_ERR=/tmp/tmp.SGdqybCzMj + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SPr33uWoAy perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.SGdqybCzMj + rm /tmp/tmp.SPr33uWoAy /tmp/tmp.SGdqybCzMj + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vQKveoHlxv +++ mktemp ++ local LAST_ERR=/tmp/tmp.YPf0ylXt0y ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vQKveoHlxv ++ cat /tmp/tmp.YPf0ylXt0y ++ rm /tmp/tmp.vQKveoHlxv /tmp/tmp.YPf0ylXt0y ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9xZQW3JVT0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.y71QxmWZHS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9xZQW3JVT0 ++ cat /tmp/tmp.y71QxmWZHS ++ rm /tmp/tmp.9xZQW3JVT0 /tmp/tmp.y71QxmWZHS ++ return 0 + [[ 
initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oTyURPVlPP +++ mktemp ++ local LAST_ERR=/tmp/tmp.U3VMHc3zxk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oTyURPVlPP ++ cat /tmp/tmp.U3VMHc3zxk ++ rm /tmp/tmp.oTyURPVlPP /tmp/tmp.U3VMHc3zxk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vxQTrVwQWg +++ mktemp ++ local LAST_ERR=/tmp/tmp.0jSlsrnv19 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vxQTrVwQWg ++ cat /tmp/tmp.0jSlsrnv19 ++ rm /tmp/tmp.vxQTrVwQWg /tmp/tmp.0jSlsrnv19 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ChdbI1sWMN +++ mktemp ++ local LAST_ERR=/tmp/tmp.tOHcvv3xup ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ChdbI1sWMN ++ cat /tmp/tmp.tOHcvv3xup ++ rm /tmp/tmp.ChdbI1sWMN /tmp/tmp.tOHcvv3xup ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NsRaWS1UDK +++ mktemp ++ local LAST_ERR=/tmp/tmp.uYEZboFLzg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NsRaWS1UDK ++ cat /tmp/tmp.uYEZboFLzg ++ rm /tmp/tmp.NsRaWS1UDK /tmp/tmp.uYEZboFLzg ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zPS8DCyH9X +++ mktemp ++ local LAST_ERR=/tmp/tmp.iYqKeYUZKy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zPS8DCyH9X ++ cat /tmp/tmp.iYqKeYUZKy ++ rm /tmp/tmp.zPS8DCyH9X /tmp/tmp.iYqKeYUZKy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wmn1Bh9ULO +++ mktemp ++ local LAST_ERR=/tmp/tmp.JGbBbn1EK1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wmn1Bh9ULO ++ cat /tmp/tmp.JGbBbn1EK1 ++ rm 
/tmp/tmp.wmn1Bh9ULO /tmp/tmp.JGbBbn1EK1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.T7OyFrmBgT +++ mktemp ++ local LAST_ERR=/tmp/tmp.RFCtVOtuUn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.T7OyFrmBgT ++ cat /tmp/tmp.RFCtVOtuUn ++ rm /tmp/tmp.T7OyFrmBgT /tmp/tmp.RFCtVOtuUn ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 8 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aj5dw0NY0v +++ mktemp ++ local LAST_ERR=/tmp/tmp.QnrRGzhE0u ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aj5dw0NY0v ++ cat /tmp/tmp.QnrRGzhE0u ++ rm /tmp/tmp.aj5dw0NY0v /tmp/tmp.QnrRGzhE0u ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 9 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0mbKL9WwC1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.7JweB8JAVm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0mbKL9WwC1 ++ cat /tmp/tmp.7JweB8JAVm ++ rm /tmp/tmp.0mbKL9WwC1 /tmp/tmp.7JweB8JAVm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 10 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FHizwDdOEf +++ mktemp ++ local LAST_ERR=/tmp/tmp.dFr85UxKDk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FHizwDdOEf ++ cat /tmp/tmp.dFr85UxKDk ++ rm /tmp/tmp.FHizwDdOEf /tmp/tmp.dFr85UxKDk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 11 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gVVYE2D1hZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.lTtor73dGb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gVVYE2D1hZ ++ cat /tmp/tmp.lTtor73dGb ++ rm /tmp/tmp.gVVYE2D1hZ /tmp/tmp.lTtor73dGb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 12 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5zAYzIYkjz +++ mktemp ++ local LAST_ERR=/tmp/tmp.nFWHUk0W1V ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ 
cat /tmp/tmp.5zAYzIYkjz ++ cat /tmp/tmp.nFWHUk0W1V ++ rm /tmp/tmp.5zAYzIYkjz /tmp/tmp.nFWHUk0W1V ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4qfKjWKpEM +++ mktemp ++ local LAST_ERR=/tmp/tmp.tvyjOYoUua ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4qfKjWKpEM ++ cat /tmp/tmp.tvyjOYoUua ++ rm /tmp/tmp.4qfKjWKpEM /tmp/tmp.tvyjOYoUua ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.6uqkS9DZuc ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.UyKVgSkFJa +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.6uqkS9DZuc +++++ cat /tmp/tmp.UyKVgSkFJa +++++ rm /tmp/tmp.6uqkS9DZuc /tmp/tmp.UyKVgSkFJa +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BFgNw27iuU +++ mktemp ++ local LAST_ERR=/tmp/tmp.rGbVhpvp5i ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BFgNw27iuU ++ cat /tmp/tmp.rGbVhpvp5i ++ rm /tmp/tmp.BFgNw27iuU /tmp/tmp.rGbVhpvp5i ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.4olYtvjTxJ ++ mktemp + local LAST_ERR=/tmp/tmp.Z07bLAqS4b + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.4olYtvjTxJ secret/my-cluster-secrets patched + cat /tmp/tmp.Z07bLAqS4b + rm /tmp/tmp.4olYtvjTxJ /tmp/tmp.Z07bLAqS4b + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RTOXUCvZI2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.mlpA3SilVy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RTOXUCvZI2 ++ cat /tmp/tmp.mlpA3SilVy ++ rm /tmp/tmp.RTOXUCvZI2 /tmp/tmp.mlpA3SilVy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for 
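The wait_cluster_consistency calls above are a bounded poll of the custom resource status. A minimal standalone sketch of the same loop, with the retry cap and sleep interval taken from the trace:

# poll .status.state until the PXC cluster reports ready, then check replica counts
i=0
until [ "$(kubectl get pxc some-name -o jsonpath='{.status.state}')" = "ready" ]; do
  [ "$i" -ge 36 ] && { echo "cluster did not become ready in time"; exit 1; }
  echo "waiting for cluster readiness"
  sleep 20
  i=$((i + 1))
done
kubectl get pxc some-name -o jsonpath='{.status.pxc.ready}'      # expected: 3
kubectl get pxc some-name -o jsonpath='{.status.haproxy.ready}'  # expected: 3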
cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4JthfPA9hi +++ mktemp ++ local LAST_ERR=/tmp/tmp.OzyrspZOQL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4JthfPA9hi ++ cat /tmp/tmp.OzyrspZOQL ++ rm /tmp/tmp.4JthfPA9hi /tmp/tmp.OzyrspZOQL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.03Ko8KlLbw +++ mktemp ++ local LAST_ERR=/tmp/tmp.Lamw0OO07N ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.03Ko8KlLbw ++ cat /tmp/tmp.Lamw0OO07N ++ rm /tmp/tmp.03Ko8KlLbw /tmp/tmp.Lamw0OO07N ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kVXiHU6s2q +++ mktemp ++ local LAST_ERR=/tmp/tmp.Rf8MhJuL24 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kVXiHU6s2q ++ cat /tmp/tmp.Rf8MhJuL24 ++ rm /tmp/tmp.kVXiHU6s2q /tmp/tmp.Rf8MhJuL24 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Esa7H6MmY0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.sGyzQmjSKl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Esa7H6MmY0 ++ cat /tmp/tmp.sGyzQmjSKl ++ rm /tmp/tmp.Esa7H6MmY0 /tmp/tmp.sGyzQmjSKl ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.mlGFPctJVl ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.VMm6Sf8Jse +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.mlGFPctJVl +++++ cat /tmp/tmp.VMm6Sf8Jse +++++ rm /tmp/tmp.mlGFPctJVl /tmp/tmp.VMm6Sf8Jse +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZtjS1vudmS +++ mktemp ++ local LAST_ERR=/tmp/tmp.CT3hkDYozZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZtjS1vudmS ++ cat /tmp/tmp.CT3hkDYozZ ++ rm /tmp/tmp.ZtjS1vudmS /tmp/tmp.CT3hkDYozZ ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h 
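The monitor password rotation above boils down to patching the cluster secret with a base64-encoded value and letting the operator restart the proxy pods. A compact sketch using the same secret, key, and test value as the trace:

# rotate the monitor password by patching the secret, as patch_secret does above
newpass=test-password2
kubectl patch secret my-cluster-secrets \
  -p="{\"data\":{\"monitor\": \"$(echo -n "$newpass" | base64)\"}}"
# the operator detects the change and cycles the proxy pods; readiness is then re-polled as above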
some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-3-80.sql ']' + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4D2MhK4E6d +++ mktemp ++ local LAST_ERR=/tmp/tmp.em8Y8Wmczf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4D2MhK4E6d ++ cat /tmp/tmp.em8Y8Wmczf ++ rm /tmp/tmp.4D2MhK4E6d /tmp/tmp.em8Y8Wmczf ++ return 0 + client_pod=pxc-client-6644d8898f-zmpgg + wait_pod pxc-client-6644d8898f-zmpgg + local pod=pxc-client-6644d8898f-zmpgg + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zmpgg ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zmpgg condition met pxc-client-6644d8898f-zmpgg.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.k6orBcxvpD/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1726/e2e-tests/users/compare/select-3.sql /tmp/tmp.k6orBcxvpD/select-3.sql + destroy users-31680 + local namespace=users-31680 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v 'the object has been modified' ++ get_operator_pod + grep -v 'get backup status: Job.batch' ++ local label_prefix=app.kubernetes.io/ + sort -u + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + tee /tmp/tmp.k6orBcxvpD/operator.log +++ grep -c percona-xtradb-cluster-operator +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator + grep -v level=info ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.JMASBetTfS +++ mktemp ++ local LAST_ERR=/tmp/tmp.uMgPsd5LpN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JMASBetTfS ++ cat /tmp/tmp.uMgPsd5LpN ++ rm /tmp/tmp.JMASBetTfS /tmp/tmp.uMgPsd5LpN ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-58867c4d5c-9l5bj ++ mktemp + local LAST_OUT=/tmp/tmp.7yp50l1Z0Z ++ mktemp + local LAST_ERR=/tmp/tmp.yyIaToCEPl + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator 
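The compare_mysql_cmd flow above runs the statement through the pxc-client pod and diffs the output against a checked-in .sql file. A rough sketch of that flow, assuming the client pod carries a mysql binary (the real run_mysql helper may differ, and paths are abbreviated):

# run the query via the client pod and compare against the expected golden file
client_pod=$(kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}')
kubectl exec "$client_pod" -- mysql -sN -h some-name-haproxy -umonitor -p'test-password2' \
  -e 'SHOW DATABASES;' > /tmp/select-3.sql
diff -u e2e-tests/users/compare/select-3.sql /tmp/select-3.sql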
percona-xtradb-cluster-operator-58867c4d5c-9l5bj + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7yp50l1Z0Z + cat /tmp/tmp.yyIaToCEPl + rm /tmp/tmp.7yp50l1Z0Z /tmp/tmp.yyIaToCEPl + return 0 2024-06-10T12:34:58.803Z INFO setup Manager starting up {"gitCommit": "75d74a6ea09a77205fd1516a16b298cf30936045", "gitBranch": "PR-1726-75d74a6e", "buildTime": "2024-06-10T10:17:46Z", "goVersion": "go1.22.4", "os": "linux", "arch": "amd64"} 2024-06-10T12:34:58.803Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1390000"} 2024-06-10T12:34:58.804Z INFO setup Registering Components. 2024-06-10T12:35:00.701Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2024-06-10T12:35:00.782Z INFO setup Starting the Cmd. 2024-06-10T12:35:00.783Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2024-06-10T12:35:00.783Z INFO controller-runtime.metrics Starting metrics server 2024-06-10T12:35:00.783Z INFO controller-runtime.webhook Starting webhook server 2024-06-10T12:35:00.783Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2024-06-10T12:35:00.784Z INFO controller-runtime.certwatcher Updated current TLS certificate 2024-06-10T12:35:00.784Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2024-06-10T12:35:00.785Z INFO controller-runtime.certwatcher Starting certificate watcher 2024-06-10T12:35:00.886Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 2024-06-10T12:35:00.899Z DEBUG events percona-xtradb-cluster-operator-58867c4d5c-9l5bj_3fb83aeb-726a-4b5c-8894-fe5c72a2044b became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"66de734b-a02d-4d2a-bd81-f8b94276060b","apiVersion":"coordination.k8s.io/v1","resourceVersion":"72965"}, "reason": "LeaderElection"} 2024-06-10T12:35:00.899Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2024-06-10T12:35:00.900Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2024-06-10T12:35:00.900Z INFO Starting Controller {"controller": "pxc-controller"} 2024-06-10T12:35:00.900Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2024-06-10T12:35:00.900Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: unknown type"} 2024-06-10T12:35:00.900Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: unknown type"} 2024-06-10T12:35:00.900Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: unknown type"} 2024-06-10T12:35:01.005Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2024-06-10T12:35:01.005Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2024-06-10T12:35:01.011Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2024-06-10T12:35:26.970Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "cf662099-778d-447b-875c-624603a03cf2", "version": "1.15.0"} 2024-06-10T12:36:45.953Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "6b648d30-34b3-42c9-96fd-6002be43f642", "user": "operator"} 2024-06-10T12:36:45.998Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-31680", "name": 
"some-name", "reconcileID": "6b648d30-34b3-42c9-96fd-6002be43f642", "user": "monitor"} 2024-06-10T12:36:46.096Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "6b648d30-34b3-42c9-96fd-6002be43f642"} 2024-06-10T12:36:46.135Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "6b648d30-34b3-42c9-96fd-6002be43f642"} 2024-06-10T12:36:46.176Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "6b648d30-34b3-42c9-96fd-6002be43f642", "user": "xtrabackup"} 2024-06-10T12:36:46.231Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "6b648d30-34b3-42c9-96fd-6002be43f642"} 2024-06-10T12:36:46.269Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "6b648d30-34b3-42c9-96fd-6002be43f642", "user": "replication"} 2024-06-10T12:36:46.355Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "6b648d30-34b3-42c9-96fd-6002be43f642", "err": "get primary pxc pod: not found"} 2024-06-10T12:36:51.016Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "9b15719e-8d4d-4293-bbfc-63dc4cca3574", "err": "get primary pxc pod: not found"} 2024-06-10T12:36:56.177Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "95db756d-0c10-45b9-9140-e50aaff6fe20", "err": "get primary pxc pod: not found"} 2024-06-10T12:39:13.270Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "4148d60c-e946-4195-b4ea-554830538c3b", "user": "root"} 2024-06-10T12:39:13.540Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "4148d60c-e946-4195-b4ea-554830538c3b", "new version": "8.0.36-28.1"} 2024-06-10T12:39:17.108Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "4148d60c-e946-4195-b4ea-554830538c3b"} 2024-06-10T12:39:21.610Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "75b1bfa3-21d9-4303-959b-f4b334f3835d"} 2024-06-10T12:39:27.010Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "dff07136-f6f9-4e8a-beee-95f1e06cc32f"} 2024-06-10T12:39:32.399Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "876b9450-736f-4680-a2dc-587b36ebcc5b"} 2024-06-10T12:39:37.591Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "a6cccd17-3bf1-48cd-839b-6aed12a5193b"} 2024-06-10T12:39:42.997Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "1be701f3-efdf-478b-95d0-e432dd13861a"} 2024-06-10T12:39:49.331Z DEBUG PXC users synced with ProxySQL 
{"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "267abd85-8c47-45f2-baa5-d1cead21735f"} 2024-06-10T12:39:54.721Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "c9eed022-3e89-4722-9398-5589b62176f3"} 2024-06-10T12:39:59.819Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "20d450f0-8f8e-4687-a40c-5a8fbb8d27ec"} 2024-06-10T12:40:05.198Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "abbc37f4-f94c-40a1-b888-37d66cf2b96e"} 2024-06-10T12:40:10.941Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "334d6224-835c-49f2-9011-0b59791e54ad"} 2024-06-10T12:40:12.533Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "f2cc65c4-d607-4892-a0b7-d755b42693fb", "user": "root"} 2024-06-10T12:40:12.571Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "f2cc65c4-d607-4892-a0b7-d755b42693fb", "user": "root"} 2024-06-10T12:40:12.579Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "f2cc65c4-d607-4892-a0b7-d755b42693fb", "secret": "some-name-mysql-init", "user": "root"} 2024-06-10T12:40:18.165Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "f2cc65c4-d607-4892-a0b7-d755b42693fb"} 2024-06-10T12:40:18.173Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "f2cc65c4-d607-4892-a0b7-d755b42693fb", "user": "root"} 2024-06-10T12:40:18.213Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "f2cc65c4-d607-4892-a0b7-d755b42693fb", "user": "root"} 2024-06-10T12:40:21.608Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "f2cc65c4-d607-4892-a0b7-d755b42693fb"} 2024-06-10T12:40:27.111Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "d92b72cb-848b-4008-aabc-c62c7691ebc1"} 2024-06-10T12:40:32.535Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "2e14d98c-506f-408e-ae24-f87783840922"} 2024-06-10T12:40:36.904Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "1a699036-9151-48ed-afe0-4867b2d083d9", "error": "exec syncusers: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: pod some-name-proxysql-2 does not have a host assigned / / 
\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-10T12:40:55.661Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "58d6250d-c80b-4830-863c-99100979b2db", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-10T12:41:01.419Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "f963f8bc-3ab0-4bed-86bf-69045693a187", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-10T12:41:02.106Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "842aa13e-0cac-4209-8d26-0756fc3a6fe4", "user": "proxyadmin"} 2024-06-10T12:41:02.106Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "842aa13e-0cac-4209-8d26-0756fc3a6fe4", "user": "proxyadmin"} 2024-06-10T12:41:02.176Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "842aa13e-0cac-4209-8d26-0756fc3a6fe4", "user": "proxyadmin"} 2024-06-10T12:41:02.188Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "842aa13e-0cac-4209-8d26-0756fc3a6fe4", "user": "proxyadmin"} 2024-06-10T12:41:02.188Z INFO Proxy pods will be restarted {"controller": 
"pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "842aa13e-0cac-4209-8d26-0756fc3a6fe4", "last-applied-secret": "1a55867d0f4fa0d0643653b2eb57b2f690315d8d843a10599c9839c2a779a95f"} 2024-06-10T12:41:02.405Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "842aa13e-0cac-4209-8d26-0756fc3a6fe4", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-10T12:41:09.180Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "c182e5dc-f198-4ae9-9776-c28be62b20eb", "err": "get primary pxc pod: not found"} 2024-06-10T12:41:15.581Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "8959c48d-678d-4779-8fa1-c5175033fc96", "err": "get primary pxc pod: not found"} 2024-06-10T12:41:20.976Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "e607694e-2c74-405c-9a8d-7ae7255487c0", "err": "get primary pxc pod: not found"} 2024-06-10T12:41:26.148Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "700d18bf-f037-4603-aaec-b635468c245f", "err": "get primary pxc pod: not found"} 2024-06-10T12:41:41.549Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "24dd46b4-8c05-4dc1-977e-f04fb8caa7ec"} 2024-06-10T12:41:51.348Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "3398d609-272d-4c70-9a55-2482ba97c841", "error": "exec syncusers: command terminated with exit code 137 / / ", "errorVerbose": "exec syncusers: command terminated with exit code 137 / / 
\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-10T12:41:56.806Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "d8f16b1b-bdd4-4070-bdf2-d059e3734fd7", "user": "xtrabackup"} 2024-06-10T12:41:56.885Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "d8f16b1b-bdd4-4070-bdf2-d059e3734fd7", "user": "xtrabackup"} 2024-06-10T12:41:56.893Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "d8f16b1b-bdd4-4070-bdf2-d059e3734fd7", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-10T12:41:56.901Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "d8f16b1b-bdd4-4070-bdf2-d059e3734fd7", "user": "xtrabackup"} 2024-06-10T12:41:56.930Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "d8f16b1b-bdd4-4070-bdf2-d059e3734fd7", "user": "xtrabackup"} 2024-06-10T12:41:56.947Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "d8f16b1b-bdd4-4070-bdf2-d059e3734fd7", "last-applied-secret": "d11bcc70ca53ea410cd455e13d82e7225917b0861d9b677ed34c02c5f6fbaf1b"} 2024-06-10T12:42:02.351Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "d8f16b1b-bdd4-4070-bdf2-d059e3734fd7"} 2024-06-10T12:42:43.894Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "7f07b416-28e5-4ce8-bc38-12f1876019a6", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-31680 on 10.204.208.10:53: no such host"} 2024-06-10T12:42:54.084Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "940b1b90-e9f2-4df5-a9dc-f395c45acdd5", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-31680 on 10.204.208.10:53: no such host"} 2024-06-10T12:43:41.729Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "a42d919a-fb8e-4480-a20e-9bca9c43181b", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-31680 on 10.204.208.10:53: no such host"} 2024-06-10T12:43:46.968Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "43e389d2-2801-489f-a312-39775f4e5d47", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-31680 on 10.204.208.10:53: no such host"} 
2024-06-10T12:44:32.692Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "1b9b2869-1734-43fa-9a7f-22fd0e82ee99"} 2024-06-10T12:44:37.625Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "9f25b88f-b923-4bf2-923e-c5e6f64f2d3b"} 2024-06-10T12:44:43.194Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "55976749-6b03-41d0-84ea-296349502428"} 2024-06-10T12:44:48.817Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "e923833a-7f71-4d52-9693-35a665bd25e1"} 2024-06-10T12:44:50.983Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "441e055a-5973-4c52-8dad-a4d771bf7e4f", "user": "monitor"} 2024-06-10T12:44:51.014Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "441e055a-5973-4c52-8dad-a4d771bf7e4f", "user": "monitor"} 2024-06-10T12:44:51.021Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "441e055a-5973-4c52-8dad-a4d771bf7e4f", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-10T12:44:51.069Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "441e055a-5973-4c52-8dad-a4d771bf7e4f", "user": "monitor"} 2024-06-10T12:44:51.076Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "441e055a-5973-4c52-8dad-a4d771bf7e4f", "user": "monitor"} 2024-06-10T12:44:51.202Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "441e055a-5973-4c52-8dad-a4d771bf7e4f", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-06-10T12:44:54.226Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "441e055a-5973-4c52-8dad-a4d771bf7e4f", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-10T12:45:24.859Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "0934da89-1292-4a92-b544-d9c9ece8d72e", "user": "monitor"} 2024-06-10T12:45:28.710Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "0934da89-1292-4a92-b544-d9c9ece8d72e"} 2024-06-10T12:45:29.826Z INFO Password updated but old 
one not discarded {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "7a6a8864-5e82-4f5d-97bd-4b45a1eae45a", "user": "monitor"} 2024-06-10T12:45:33.911Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "7a6a8864-5e82-4f5d-97bd-4b45a1eae45a"} 2024-06-10T12:45:35.592Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "95f289b8-b353-42b0-8f79-243d3cd1cf3f", "user": "monitor"} 2024-06-10T12:45:39.143Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "95f289b8-b353-42b0-8f79-243d3cd1cf3f"} 2024-06-10T12:45:41.099Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "da5ab2e0-53fd-401e-811e-111cde3db34b", "user": "monitor"} 2024-06-10T12:45:44.699Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "da5ab2e0-53fd-401e-811e-111cde3db34b"} 2024-06-10T12:45:46.587Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "e5a6cdf4-38cb-4fe4-8fe0-4eb860ab3ca8", "user": "monitor"} 2024-06-10T12:45:46.821Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "e5a6cdf4-38cb-4fe4-8fe0-4eb860ab3ca8", "user": "monitor"} 2024-06-10T12:45:46.855Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "e5a6cdf4-38cb-4fe4-8fe0-4eb860ab3ca8", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-06-10T12:45:50.500Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "e5a6cdf4-38cb-4fe4-8fe0-4eb860ab3ca8"} 2024-06-10T12:45:55.914Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "77e57f4c-70ed-41dd-9dd6-db30958b1b71"} 2024-06-10T12:46:01.022Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "055717c7-4213-49b8-9a38-5eb507e37443"} 2024-06-10T12:46:06.304Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "f9ea2e5b-239f-4f64-bcb1-cb18f829d9c0"} 2024-06-10T12:46:11.637Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "390f8cc5-0288-4d60-a44b-9a1cbd674723"} 2024-06-10T12:46:13.672Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "462488e1-8497-451b-bce1-05494b199eaf", "user": "operator"} 2024-06-10T12:46:13.700Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "462488e1-8497-451b-bce1-05494b199eaf", "user": "operator"} 2024-06-10T12:46:13.707Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": 
"462488e1-8497-451b-bce1-05494b199eaf", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-10T12:46:13.715Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "462488e1-8497-451b-bce1-05494b199eaf", "user": "operator"} 2024-06-10T12:46:13.747Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "462488e1-8497-451b-bce1-05494b199eaf", "user": "operator"} 2024-06-10T12:46:13.794Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "462488e1-8497-451b-bce1-05494b199eaf", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-06-10T12:46:15.159Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "462488e1-8497-451b-bce1-05494b199eaf", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31680.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31680.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31680.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31680.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31680.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31680.svc.' 
(using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-10T12:46:40.736Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "52d4fcea-8bf4-400a-8fc6-13ec67b43888"} 2024-06-10T12:46:49.403Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "8e63f4b8-87e6-4974-b2bc-e777bae05d40"} 2024-06-10T12:46:52.615Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "68635bc2-78a7-4798-98b1-11e9b1a7abb2", "secrets": "my-cluster-secrets-2"} 2024-06-10T12:46:52.629Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "68635bc2-78a7-4798-98b1-11e9b1a7abb2", "user": "root"} 2024-06-10T12:46:52.666Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "68635bc2-78a7-4798-98b1-11e9b1a7abb2", "user": "root"} 2024-06-10T12:46:52.675Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "68635bc2-78a7-4798-98b1-11e9b1a7abb2", "secret": "some-name-mysql-init", "user": "root"} 2024-06-10T12:47:00.543Z ERROR Reconciler error {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "68635bc2-78a7-4798-98b1-11e9b1a7abb2", "error": "reconcile users: manage sys users: sync users: exec syncusers: command terminated with exit code 1 / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-31680.svc.cluster.local:3306) to ProxySQL\nAdding user to ProxySQL: root\n / ERROR 1045 (28000) at line 1: ProxySQL Admin Error: UNIQUE constraint failed: mysql_users.username, mysql_users.frontend\nERROR (line:715) : Failed to add the user (root) from PXC to ProxySQL database. \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-31680.svc.cluster.local:3306) to ProxySQL\nAdding user to ProxySQL: root\n / ERROR 1045 (28000) at line 1: ProxySQL Admin Error: UNIQUE constraint failed: mysql_users.username, mysql_users.frontend\nERROR (line:715) : Failed to add the user (root) from PXC to ProxySQL database. 
\n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).handleRootUser\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:241\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updateUsers\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:157\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileUsers\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:110\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:281\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:114\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:311\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:261\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:222\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695\nsync 
users\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).handleRootUser\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:243\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updateUsers\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:157\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileUsers\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:110\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:281\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:114\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:311\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:261\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:222\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695\nmanage sys users\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileUsers\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:112\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:281\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:114\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:311\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:261\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:222\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695\nreconcile 
users\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:283\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:114\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:311\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:261\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:222\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-10T12:47:00.543Z INFO Warning: Reconciler returned both a non-zero result and a non-nil error. The result will always be ignored if the error is non-nil and the non-nil error causes reqeueuing with exponential backoff. For more details, see: https://pkg.go.dev/sigs.k8s.io/controller-runtime/pkg/reconcile#Reconciler {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "68635bc2-78a7-4798-98b1-11e9b1a7abb2"} 2024-06-10T12:47:01.035Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "ccf0fc1e-e288-46a7-aa36-6c822308d9eb"} 2024-06-10T12:47:02.168Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "user": "root"} 2024-06-10T12:47:02.210Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "user": "root"} 2024-06-10T12:47:02.220Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "secret": "some-name-mysql-init", "user": "root"} 2024-06-10T12:47:05.519Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a"} 2024-06-10T12:47:05.528Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "user": "root"} 2024-06-10T12:47:05.571Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "user": "root"} 2024-06-10T12:47:05.589Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "user": "operator"} 2024-06-10T12:47:05.620Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "user": "operator"} 2024-06-10T12:47:05.628Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": 
"eb171146-8b52-4a52-92b7-cf18b082aa5a", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-10T12:47:05.636Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "user": "operator"} 2024-06-10T12:47:05.662Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "user": "operator"} 2024-06-10T12:47:05.680Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "user": "monitor"} 2024-06-10T12:47:05.709Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "user": "monitor"} 2024-06-10T12:47:05.719Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-10T12:47:05.761Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "user": "monitor"} 2024-06-10T12:47:05.770Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "user": "monitor"} 2024-06-10T12:47:05.857Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "user": "xtrabackup"} 2024-06-10T12:47:05.884Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "user": "xtrabackup"} 2024-06-10T12:47:05.892Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-10T12:47:05.899Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "user": "xtrabackup"} 2024-06-10T12:47:05.928Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "user": "xtrabackup"} 2024-06-10T12:47:05.943Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "user": "replication"} 2024-06-10T12:47:05.971Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "user": "replication"} 2024-06-10T12:47:05.980Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "secret": "some-name-mysql-init", "user": "replication"} 2024-06-10T12:47:05.989Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": 
"eb171146-8b52-4a52-92b7-cf18b082aa5a", "user": "replication"} 2024-06-10T12:47:06.018Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "user": "replication"} 2024-06-10T12:47:06.018Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "user": "proxyadmin"} 2024-06-10T12:47:06.064Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "user": "proxyadmin"} 2024-06-10T12:47:06.073Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "user": "proxyadmin"} 2024-06-10T12:47:06.073Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "last-applied-secret": "4998150a51c8bf6b6e09d574dfec327d8a0502bf0223f6ba3b97b06a2ef063b0"} 2024-06-10T12:47:06.073Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "last-applied-secret": "4998150a51c8bf6b6e09d574dfec327d8a0502bf0223f6ba3b97b06a2ef063b0"} 2024-06-10T12:47:06.309Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb171146-8b52-4a52-92b7-cf18b082aa5a", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-10T12:47:53.320Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "d701206f-3a1f-4cd1-b9a1-2d906f3a0122", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-31680 on 10.204.208.10:53: no such host"} 2024-06-10T12:47:58.371Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "356403a5-e1b2-4f07-a45d-363676c2fd26", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-31680 on 10.204.208.10:53: no such host"} 2024-06-10T12:48:03.608Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "b30c3e62-8b46-438b-9f79-42fbe1b5e32b", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-31680 on 10.204.208.10:53: no such host"} 2024-06-10T12:48:56.516Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "124be105-11d2-4d5c-935c-74c554407794", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-31680 on 10.204.208.10:53: no such host"} 2024-06-10T12:49:01.849Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "e78b55b5-82c7-4fce-be2d-c0c8650abb9c", "primary name": "some-name-pxc-0.some-name-pxc.users-31680.svc.cluster.local"} 2024-06-10T12:49:17.391Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "448c3a5e-7637-416f-8e5b-3944f905779d", "primary name": "some-name-pxc-0.some-name-pxc.users-31680.svc.cluster.local"} 2024-06-10T12:49:28.279Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "24126fdd-c37a-4dc9-b28c-d52e1fe52a5c", "user": "monitor"} 2024-06-10T12:49:28.512Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "24126fdd-c37a-4dc9-b28c-d52e1fe52a5c", "user": "monitor"} 2024-06-10T12:49:28.545Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "24126fdd-c37a-4dc9-b28c-d52e1fe52a5c", "last-applied-secret": "4998150a51c8bf6b6e09d574dfec327d8a0502bf0223f6ba3b97b06a2ef063b0"} 2024-06-10T12:49:32.129Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "24126fdd-c37a-4dc9-b28c-d52e1fe52a5c"} 2024-06-10T12:49:36.632Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "a295dd1c-f232-4e3d-b086-d8490309ad59"} 2024-06-10T12:49:38.558Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "dedb3c53-f6a1-45f9-88d9-e6d2acf54985", "user": "operator"} 2024-06-10T12:49:38.590Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "dedb3c53-f6a1-45f9-88d9-e6d2acf54985", "user": "operator"} 2024-06-10T12:49:38.600Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "dedb3c53-f6a1-45f9-88d9-e6d2acf54985", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-10T12:49:38.614Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "dedb3c53-f6a1-45f9-88d9-e6d2acf54985", "user": "operator"} 2024-06-10T12:49:38.644Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "dedb3c53-f6a1-45f9-88d9-e6d2acf54985", "user": "operator"} 2024-06-10T12:49:38.686Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "dedb3c53-f6a1-45f9-88d9-e6d2acf54985", "last-applied-secret": "b40a139ad08e65a4ce108363ea67e627b42c2084b2a4e66dee62fc5ce9ab7a32"} 2024-06-10T12:49:39.983Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "dedb3c53-f6a1-45f9-88d9-e6d2acf54985", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31680.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31680.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31680.svc.' 
(using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31680.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31680.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-31680.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-10T12:50:06.461Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "566a73e1-8537-4c4f-bb09-f998936bcbd7"} 2024-06-10T12:50:15.206Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "37cc8f17-4951-4268-9cc5-e229056a1aeb"} 2024-06-10T12:50:20.015Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "adbefa67-2757-4c29-a9e9-1a3e2773a196"} 2024-06-10T12:50:25.403Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "6b94cef1-88e3-4df9-9677-7ecb2e1ec4a1"} 2024-06-10T12:50:30.844Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "de6180c5-0479-41a5-8831-5caedfd4943c"} 2024-06-10T12:50:37.192Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "b5e660b5-57b1-42c3-8af1-69162c3536a0"} 2024-06-10T12:50:41.536Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eb8e0631-3a2a-4542-9473-0bb46373a090"} 2024-06-10T12:50:47.404Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "0508c843-9c58-404d-a83c-06fb3be8386e"} 2024-06-10T12:50:52.314Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "c005aa76-a4dd-4205-9a86-fed97b427d5c"} 2024-06-10T12:50:57.694Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "0435fd0f-f373-4502-9fe5-ac8952246410"} 2024-06-10T12:51:03.031Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "632c3d86-8958-4312-8f5a-bd88d0d21847"} 2024-06-10T12:51:08.737Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": 
"5fdf517d-fe28-45e3-b262-ff69cd4b6abb"} 2024-06-10T12:51:14.104Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5b1ee570-dc21-4801-966f-a3f6cf47702b"} 2024-06-10T12:51:19.513Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "fe232063-6088-4add-9325-86550554dd7d"} 2024-06-10T12:51:21.367Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "da7278af-c81e-41fd-9742-cccaa7898893", "user": "root"} 2024-06-10T12:51:21.417Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "da7278af-c81e-41fd-9742-cccaa7898893", "user": "root"} 2024-06-10T12:51:21.424Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "da7278af-c81e-41fd-9742-cccaa7898893", "secret": "some-name-mysql-init", "user": "root"} 2024-06-10T12:51:26.810Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "da7278af-c81e-41fd-9742-cccaa7898893"} 2024-06-10T12:51:26.820Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "da7278af-c81e-41fd-9742-cccaa7898893", "user": "root"} 2024-06-10T12:51:26.861Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "da7278af-c81e-41fd-9742-cccaa7898893", "user": "root"} 2024-06-10T12:51:26.890Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "da7278af-c81e-41fd-9742-cccaa7898893", "user": "monitor"} 2024-06-10T12:51:26.919Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "da7278af-c81e-41fd-9742-cccaa7898893", "user": "monitor"} 2024-06-10T12:51:26.927Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "da7278af-c81e-41fd-9742-cccaa7898893", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-10T12:51:26.972Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "da7278af-c81e-41fd-9742-cccaa7898893", "user": "monitor"} 2024-06-10T12:51:26.983Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "da7278af-c81e-41fd-9742-cccaa7898893", "user": "monitor"} 2024-06-10T12:51:27.063Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "da7278af-c81e-41fd-9742-cccaa7898893", "user": "xtrabackup"} 2024-06-10T12:51:27.092Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "da7278af-c81e-41fd-9742-cccaa7898893", "user": "xtrabackup"} 2024-06-10T12:51:27.100Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "da7278af-c81e-41fd-9742-cccaa7898893", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-10T12:51:27.109Z INFO Internal secrets updated {"controller": 
"pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "da7278af-c81e-41fd-9742-cccaa7898893", "user": "xtrabackup"} 2024-06-10T12:51:27.138Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "da7278af-c81e-41fd-9742-cccaa7898893", "user": "xtrabackup"} 2024-06-10T12:51:27.151Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "da7278af-c81e-41fd-9742-cccaa7898893", "user": "proxyadmin"} 2024-06-10T12:51:27.199Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "da7278af-c81e-41fd-9742-cccaa7898893", "user": "proxyadmin"} 2024-06-10T12:51:27.208Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "da7278af-c81e-41fd-9742-cccaa7898893", "user": "proxyadmin"} 2024-06-10T12:51:27.208Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "da7278af-c81e-41fd-9742-cccaa7898893", "last-applied-secret": "07cd10be8ebfe46a89aa44f3ecf778b448f3c7875e6beaa5d70280daa553745b"} 2024-06-10T12:51:27.208Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "da7278af-c81e-41fd-9742-cccaa7898893", "last-applied-secret": "07cd10be8ebfe46a89aa44f3ecf778b448f3c7875e6beaa5d70280daa553745b"} 2024-06-10T12:51:27.489Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "da7278af-c81e-41fd-9742-cccaa7898893", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-10T12:52:14.826Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "9f55803d-812c-436f-b0b3-24593d140085", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-31680 on 10.204.208.10:53: no such host"} 2024-06-10T12:52:25.129Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "8e878fb6-7bea-48c4-b382-f664dc4541aa", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-31680 on 10.204.208.10:53: no such host"} 2024-06-10T12:53:17.351Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "eea604a6-77d9-4574-8f13-89a6d5775c20", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-31680 on 10.204.208.10:53: no such host"} 2024-06-10T12:53:17.562Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "93172c1b-625c-4061-a31a-611cb6625a58", "err": "remove outdated replication channels: get current replication channels: select current replication channels: Error 1047 (08S01): WSREP has not yet prepared node for application use"} 2024-06-10T12:53:22.628Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "7995f151-82eb-47ea-9d02-8c43ba350959", "primary name": "some-name-pxc-0.some-name-pxc.users-31680.svc.cluster.local"} 2024-06-10T12:53:22.896Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "1d2e3448-2124-4a99-a4ba-7128761819bf", "primary name": "some-name-pxc-0.some-name-pxc.users-31680.svc.cluster.local"} 2024-06-10T12:53:23.044Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "ad9d9c5c-2233-4b29-bb79-34df890bfe32", "primary name": "some-name-pxc-0.some-name-pxc.users-31680.svc.cluster.local"} 2024-06-10T12:53:27.841Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "d960a06d-eef1-4daa-b7b4-96c315d7645c", "primary name": "some-name-pxc-0.some-name-pxc.users-31680.svc.cluster.local"} 2024-06-10T12:53:33.026Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "8ba6a517-5894-407b-ac05-0c62411d2f2a", "primary name": "some-name-pxc-0.some-name-pxc.users-31680.svc.cluster.local"} 2024-06-10T12:53:38.173Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "94d6601c-674e-45d8-8499-fc46c9a73b92", "primary name": "some-name-pxc-0.some-name-pxc.users-31680.svc.cluster.local"} 2024-06-10T12:53:43.319Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "f1fe059e-d959-43e5-b11c-8be9d6bb0cd5", "primary name": "some-name-pxc-0.some-name-pxc.users-31680.svc.cluster.local"} 2024-06-10T12:53:48.516Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "79f4aa99-d135-4c9b-b2b2-05d488e97f3e", "primary name": "some-name-pxc-0.some-name-pxc.users-31680.svc.cluster.local"} 2024-06-10T12:53:51.087Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: 5f205191-1f0f-4bc2-ab96-133595b0b745 2024-06-10T12:53:54.245Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "54e7e1ba-2cbc-44c8-a5ca-50c8fe8aef62", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 10.204.221.153:3306: connect: connection refused"} 2024-06-10T12:56:21.267Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5c41a491-4755-4886-ba07-e0af81f097cc", "user": "root"} 2024-06-10T12:56:21.306Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5c41a491-4755-4886-ba07-e0af81f097cc", "user": "root"} 2024-06-10T12:56:21.313Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5c41a491-4755-4886-ba07-e0af81f097cc", "secret": "some-name-mysql-init", "user": "root"} 2024-06-10T12:56:21.323Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5c41a491-4755-4886-ba07-e0af81f097cc", "user": "root"} 2024-06-10T12:56:21.360Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5c41a491-4755-4886-ba07-e0af81f097cc", "user": "root"} 2024-06-10T12:56:21.374Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5c41a491-4755-4886-ba07-e0af81f097cc", "user": "operator"} 2024-06-10T12:56:21.406Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5c41a491-4755-4886-ba07-e0af81f097cc", "user": "operator"} 2024-06-10T12:56:21.416Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5c41a491-4755-4886-ba07-e0af81f097cc", "secret": "some-name-mysql-init", "user": 
"operator"} 2024-06-10T12:56:21.427Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5c41a491-4755-4886-ba07-e0af81f097cc", "user": "operator"} 2024-06-10T12:56:21.454Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5c41a491-4755-4886-ba07-e0af81f097cc", "user": "operator"} 2024-06-10T12:56:21.470Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5c41a491-4755-4886-ba07-e0af81f097cc", "user": "monitor"} 2024-06-10T12:56:21.501Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5c41a491-4755-4886-ba07-e0af81f097cc", "user": "monitor"} 2024-06-10T12:56:21.510Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5c41a491-4755-4886-ba07-e0af81f097cc", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-10T12:56:21.518Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5c41a491-4755-4886-ba07-e0af81f097cc", "user": "monitor"} 2024-06-10T12:56:21.616Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5c41a491-4755-4886-ba07-e0af81f097cc", "user": "xtrabackup"} 2024-06-10T12:56:21.642Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5c41a491-4755-4886-ba07-e0af81f097cc", "user": "xtrabackup"} 2024-06-10T12:56:21.652Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5c41a491-4755-4886-ba07-e0af81f097cc", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-10T12:56:21.661Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5c41a491-4755-4886-ba07-e0af81f097cc", "user": "xtrabackup"} 2024-06-10T12:56:21.687Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5c41a491-4755-4886-ba07-e0af81f097cc", "user": "xtrabackup"} 2024-06-10T12:56:21.704Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5c41a491-4755-4886-ba07-e0af81f097cc", "user": "replication"} 2024-06-10T12:56:21.734Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5c41a491-4755-4886-ba07-e0af81f097cc", "user": "replication"} 2024-06-10T12:56:21.742Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5c41a491-4755-4886-ba07-e0af81f097cc", "secret": "some-name-mysql-init", "user": "replication"} 2024-06-10T12:56:21.749Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5c41a491-4755-4886-ba07-e0af81f097cc", "user": "replication"} 2024-06-10T12:56:21.778Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5c41a491-4755-4886-ba07-e0af81f097cc", "user": "replication"} 2024-06-10T12:56:21.778Z INFO 
Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5c41a491-4755-4886-ba07-e0af81f097cc", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-06-10T12:56:21.778Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "5c41a491-4755-4886-ba07-e0af81f097cc", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-06-10T12:57:13.107Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "6da13ace-c349-4d19-a777-22374d7b1c35", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-31680 on 10.204.208.10:53: no such host"} 2024-06-10T12:57:18.362Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "56f73055-781b-44a7-853e-1cfc50ef66dd", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-31680 on 10.204.208.10:53: no such host"} 2024-06-10T12:58:37.785Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "4678f4b2-cf48-4a8b-aaf5-97817d840cec", "user": "monitor"} 2024-06-10T12:58:38.100Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "4678f4b2-cf48-4a8b-aaf5-97817d840cec", "user": "monitor"} 2024-06-10T12:58:38.121Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "4678f4b2-cf48-4a8b-aaf5-97817d840cec", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-06-10T12:58:48.197Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "967b4fa9-42f7-459e-a1d7-fe417db2fb6e", "user": "monitor"} 2024-06-10T12:58:48.224Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "967b4fa9-42f7-459e-a1d7-fe417db2fb6e", "user": "monitor"} 2024-06-10T12:58:48.233Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "967b4fa9-42f7-459e-a1d7-fe417db2fb6e", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-10T12:58:48.240Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "967b4fa9-42f7-459e-a1d7-fe417db2fb6e", "user": "monitor"} 2024-06-10T12:58:48.354Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "967b4fa9-42f7-459e-a1d7-fe417db2fb6e", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"} 2024-06-10T12:59:53.809Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "2f63e917-7ea1-43be-b76e-afefc5a4fda4", "user": "monitor"} 2024-06-10T12:59:54.073Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": 
"2f63e917-7ea1-43be-b76e-afefc5a4fda4", "user": "monitor"} 2024-06-10T12:59:54.103Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-31680", "name": "some-name", "reconcileID": "2f63e917-7ea1-43be-b76e-afefc5a4fda4", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:222 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:261 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:324 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1248 [mysql] 2024/06/10 12:55:59 packets.go:37: read tcp 10.3.186.4:49642->10.204.221.153:3306: i/o timeout sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2 + grep -v NAMESPACE + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-31680 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.X9cJZoY6V2 ++ mktemp + local LAST_ERR=/tmp/tmp.LasmzUMEPO + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.X9cJZoY6V2 perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.LasmzUMEPO + rm /tmp/tmp.X9cJZoY6V2 /tmp/tmp.LasmzUMEPO + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.7JiOl3uuQx ++ mktemp + local LAST_ERR=/tmp/tmp.K0RO0zKjK5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7JiOl3uuQx No resources found + cat /tmp/tmp.K0RO0zKjK5 + rm /tmp/tmp.7JiOl3uuQx /tmp/tmp.K0RO0zKjK5 + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.M5oBoV71bt ++ mktemp + local LAST_ERR=/tmp/tmp.DSH0I07bV8 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.M5oBoV71bt No resources found + cat /tmp/tmp.DSH0I07bV8 + rm /tmp/tmp.M5oBoV71bt /tmp/tmp.DSH0I07bV8 + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.JJQjGuNVz6 ++ mktemp + local LAST_ERR=/tmp/tmp.yyqlzblEMA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.JJQjGuNVz6 validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.yyqlzblEMA + rm /tmp/tmp.JJQjGuNVz6 /tmp/tmp.yyqlzblEMA + return 0 + kubectl_bin delete -f 
https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-31680 + rm -rf /tmp/tmp.k6orBcxvpD + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.gOhOA4CZWa + local LAST_OUT=/tmp/tmp.2uM55dceh2 ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.RffLfj3WCp + local exit_status=0 + local LAST_ERR=/tmp/tmp.DRueGJyjLQ + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace users-31680 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator
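
Editor's note: the repeated "mktemp / seq 0 2 / set +e ... cat ... rm" scaffolding around every kubectl call in the teardown trace above comes from the test suite's kubectl wrapper. Below is a minimal sketch of that retry pattern, reconstructed only from the xtrace output in this log; the wrapper's real name and body live in the test suite, and the retry delay and stderr handling shown here are assumptions not visible in the trace.

kubectl_bin() {
    # Capture stdout/stderr of each attempt in temp files, as seen in the trace.
    local LAST_OUT LAST_ERR exit_status=0
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do                     # up to three attempts
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ "$exit_status" != 0 ]; then
            sleep 1                             # retry delay is an assumption, not shown in the trace
            continue
        fi
        break
    done
    cat "$LAST_OUT"                             # replay captured output, as the trace does
    cat "$LAST_ERR" >&2
    rm -f "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}

Each cleanup step in this section (delete pxc, pxc-backup, pxc-restore, the validating webhook, cert-manager, and the test namespaces) goes through this wrapper, which is why the same mktemp/cat/rm pattern repeats around every kubectl invocation in the log.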