Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/logs/users-5-7.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + create_infra users-29349 + local ns=users-29349 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-10621 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.n3umGFMj9f ++ mktemp + local LAST_ERR=/tmp/tmp.7MkD0dslHl + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.n3umGFMj9f perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.7MkD0dslHl + rm /tmp/tmp.n3umGFMj9f /tmp/tmp.7MkD0dslHl + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.kIo9AOrc3u ++ mktemp + local LAST_ERR=/tmp/tmp.2PMTz8ER4d + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.kIo9AOrc3u No resources found + cat /tmp/tmp.2PMTz8ER4d + rm /tmp/tmp.kIo9AOrc3u /tmp/tmp.2PMTz8ER4d + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.PdbicPeNjK ++ mktemp + local LAST_ERR=/tmp/tmp.o1h3I78DGd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PdbicPeNjK No resources found + cat /tmp/tmp.o1h3I78DGd + rm /tmp/tmp.PdbicPeNjK /tmp/tmp.o1h3I78DGd + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ sed s/NAMESPACE// ++ awk '-F ' '{print $2}' + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ awk '{print $1}' ++ grep chaos-mesh.org + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified 
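The cleanup above first strips finalizers from any PerconaXtraDBCluster objects left over from earlier runs, then deletes them; without the patch, `kubectl delete` can hang indefinitely on a finalizer that no running operator will ever service. A minimal sketch of the same pattern as it appears in this trace, assuming kubectl access to the test cluster (the `pxc` short name comes from the Percona CRDs applied later in this log):

  # Clear finalizers on every pxc object in every namespace, then delete them all.
  kubectl get pxc --all-namespaces -o wide \
    | grep -v NAMESPACE \
    | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
  kubectl delete pxc --all --all-namespaces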
+ : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace + xargs kubectl delete ns ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator ++ mktemp + kubectl_bin get ns + local LAST_OUT=/tmp/tmp.8zpjTTlg3R ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.6yZwZDeAii + local LAST_OUT=/tmp/tmp.SyYAVvjPrZ + local exit_status=0 ++ mktemp ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + local LAST_ERR=/tmp/tmp.vTzWEqnsF9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SyYAVvjPrZ + cat /tmp/tmp.vTzWEqnsF9 + rm /tmp/tmp.SyYAVvjPrZ /tmp/tmp.vTzWEqnsF9 + return 0 namespace "users-10621" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8zpjTTlg3R namespace "pxc-operator" deleted + cat /tmp/tmp.6yZwZDeAii + rm /tmp/tmp.8zpjTTlg3R /tmp/tmp.6yZwZDeAii + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.1TtW3D9em4 ++ mktemp + local LAST_ERR=/tmp/tmp.rLCsNRXd0V + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1TtW3D9em4 namespace/pxc-operator created + cat /tmp/tmp.rLCsNRXd0V + rm /tmp/tmp.1TtW3D9em4 /tmp/tmp.rLCsNRXd0V + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.ezOuUWenPt +++ mktemp ++ local LAST_ERR=/tmp/tmp.z2VRnYOnnH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ezOuUWenPt ++ cat /tmp/tmp.z2VRnYOnnH ++ rm /tmp/tmp.ezOuUWenPt /tmp/tmp.z2VRnYOnnH ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1736-1898f674-3-cluster1 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.LK5LbSLomK ++ mktemp + local LAST_ERR=/tmp/tmp.XxIMdZ0tsB + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1736-1898f674-3-cluster1 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LK5LbSLomK Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1736-1898f674-3-cluster1" modified. 
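Every kubectl call in this trace goes through a kubectl_bin wrapper that captures stdout and stderr into mktemp files, retries failed invocations, and replays the captured output afterwards; the LAST_OUT/LAST_ERR, exit_status, and `seq 0 2` fragments above are its internals. A condensed sketch of that retry shape, reconstructed from the trace rather than copied from the e2e framework, so details may differ:

  kubectl_bin() {
      local LAST_OUT LAST_ERR exit_status=0
      LAST_OUT=$(mktemp)
      LAST_ERR=$(mktemp)
      for i in $(seq 0 2); do                      # up to three attempts
          set +e
          kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
          exit_status=$?
          set -e
          [ "$exit_status" -eq 0 ] && break
          sleep 0                                  # the trace shows no backoff between retries
      done
      cat "$LAST_OUT"                              # replay captured output
      cat "$LAST_ERR"
      rm -f "$LAST_OUT" "$LAST_ERR"
      return "$exit_status"
  }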
+ cat /tmp/tmp.XxIMdZ0tsB + rm /tmp/tmp.LK5LbSLomK /tmp/tmp.XxIMdZ0tsB + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.XfRMWaViOh ++ mktemp + local LAST_ERR=/tmp/tmp.IgdeiDmrep + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XfRMWaViOh customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.IgdeiDmrep + rm /tmp/tmp.XfRMWaViOh /tmp/tmp.IgdeiDmrep + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.3NgdqxNfcV ++ mktemp + local LAST_ERR=/tmp/tmp.1EUqr2guh6 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3NgdqxNfcV clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.1EUqr2guh6 + rm /tmp/tmp.3NgdqxNfcV /tmp/tmp.1EUqr2guh6 + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/deploy/cw-operator.yaml + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1736-1898f674^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + kubectl_bin apply -f - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - ++ mktemp + local LAST_OUT=/tmp/tmp.hn1ekU553W ++ mktemp + local LAST_ERR=/tmp/tmp.SRbpfw4tym + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hn1ekU553W deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.SRbpfw4tym + rm /tmp/tmp.hn1ekU553W /tmp/tmp.SRbpfw4tym + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.2nzA5SYeFD ++ mktemp + local LAST_ERR=/tmp/tmp.lWHJQHDC44 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2nzA5SYeFD pod/percona-xtradb-cluster-operator-6b9f484f66-rtddm condition met + cat /tmp/tmp.lWHJQHDC44 + rm /tmp/tmp.2nzA5SYeFD /tmp/tmp.lWHJQHDC44 + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.xihbzf8fHe +++ mktemp ++ local LAST_ERR=/tmp/tmp.GPWod2Mr9J ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xihbzf8fHe ++ cat /tmp/tmp.GPWod2Mr9J ++ rm /tmp/tmp.xihbzf8fHe /tmp/tmp.GPWod2Mr9J ++ return 0 + wait_pod percona-xtradb-cluster-operator-6b9f484f66-rtddm 480 pxc-operator + local pod=percona-xtradb-cluster-operator-6b9f484f66-rtddm + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-6b9f484f66-rtddm ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-6b9f484f66-rtddm condition met percona-xtradb-cluster-operator-6b9f484f66-rtddm.Ok + sleep 3 + create_namespace users-29349 + local namespace=users-29349 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v 
'^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces users-29349' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-29349 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-29349 + xargs kubectl delete ns + kubectl_bin get ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.vybKsBJIcA + local LAST_OUT=/tmp/tmp.xFyhDtAgpg ++ mktemp + local LAST_ERR=/tmp/tmp.ZvOeFRbHiN + local exit_status=0 ++ mktemp ++ seq 0 2 + local LAST_ERR=/tmp/tmp.eFzf99hZro + local exit_status=0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-29349 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xFyhDtAgpg + cat /tmp/tmp.eFzf99hZro + rm /tmp/tmp.xFyhDtAgpg /tmp/tmp.eFzf99hZro + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-29349 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-29349 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.vybKsBJIcA + cat /tmp/tmp.ZvOeFRbHiN Error from server (NotFound): namespaces "users-29349" not found + rm /tmp/tmp.vybKsBJIcA /tmp/tmp.ZvOeFRbHiN + return 1 + : + wait_for_delete namespace/users-29349 + local res=namespace/users-29349 + echo -n 'namespace/users-29349 - ' namespace/users-29349 - + set +o xtrace Error from server (NotFound): namespaces "users-29349" not found + desc 'create namespace users-29349' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-29349 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-29349 ++ mktemp + local LAST_OUT=/tmp/tmp.KbSWLqH6FB ++ mktemp + local LAST_ERR=/tmp/tmp.2Ebr6L7d8k + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-29349 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.KbSWLqH6FB namespace/users-29349 created + cat /tmp/tmp.2Ebr6L7d8k + rm /tmp/tmp.KbSWLqH6FB /tmp/tmp.2Ebr6L7d8k + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.4Y5AMmUn2W +++ mktemp ++ local LAST_ERR=/tmp/tmp.eeIn0LXgSy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4Y5AMmUn2W ++ cat /tmp/tmp.eeIn0LXgSy ++ rm /tmp/tmp.4Y5AMmUn2W /tmp/tmp.eeIn0LXgSy ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1736-1898f674-3-cluster1 --namespace=users-29349 ++ mktemp + local LAST_OUT=/tmp/tmp.rb3OPVOOis ++ mktemp + local LAST_ERR=/tmp/tmp.IwgndlKDID + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1736-1898f674-3-cluster1 --namespace=users-29349 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rb3OPVOOis Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1736-1898f674-3-cluster1" modified. 
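The namespace handling here is the standard per-test reset: delete any previous test namespace (a NotFound error is tolerated, and the Forbidden error on "default" is expected noise from piping `kubectl get ns` through `xargs kubectl delete ns`), wait until the namespace is fully gone, recreate it, and point the current kubeconfig context at it. A sketch of that cycle, using the namespace name from this run:

  ns=users-29349
  kubectl delete namespace "$ns" 2>/dev/null || true   # NotFound on a fresh cluster is fine
  while kubectl get namespace "$ns" >/dev/null 2>&1; do
      sleep 1                                          # wait_for_delete polls until NotFound
  done
  kubectl create namespace "$ns"
  kubectl config set-context "$(kubectl config current-context)" --namespace="$ns"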
+ cat /tmp/tmp.IwgndlKDID + rm /tmp/tmp.rb3OPVOOis /tmp/tmp.IwgndlKDID + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.oFFIck9m2f ++ mktemp + local LAST_ERR=/tmp/tmp.Ft13eFRNRY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.oFFIck9m2f secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.Ft13eFRNRY + rm /tmp/tmp.oFFIck9m2f /tmp/tmp.Ft13eFRNRY + return 0 + desc 'create PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.jb2yotNI8I ++ mktemp + local LAST_ERR=/tmp/tmp.9Yhg660O0s + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jb2yotNI8I secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.9Yhg660O0s + rm /tmp/tmp.jb2yotNI8I /tmp/tmp.9Yhg660O0s + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/conf/client.yml ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + local LAST_OUT=/tmp/tmp.6p5ByCmgei + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 
's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ mktemp + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1736-1898f674#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-29349~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_ERR=/tmp/tmp.S34mN4Cq3j + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.6p5ByCmgei deployment.apps/pxc-client created + cat /tmp/tmp.S34mN4Cq3j + rm /tmp/tmp.6p5ByCmgei /tmp/tmp.S34mN4Cq3j + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1736-1898f674#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-29349~ + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' ++ mktemp + local LAST_OUT=/tmp/tmp.5sY5tdw6gt ++ mktemp + local LAST_ERR=/tmp/tmp.yJLM8pziRk + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5sY5tdw6gt perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.yJLM8pziRk + rm /tmp/tmp.5sY5tdw6gt /tmp/tmp.yJLM8pziRk + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.KuVcLjBhO9 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.P98ccWW4vD +++ local exit_status=0 ++++ seq 0 
2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.KuVcLjBhO9 +++ cat /tmp/tmp.P98ccWW4vD +++ rm /tmp/tmp.KuVcLjBhO9 /tmp/tmp.P98ccWW4vD +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.aZtcutKury ++++ mktemp +++ local LAST_ERR=/tmp/tmp.45CvC06oBt +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.aZtcutKury +++ cat /tmp/tmp.45CvC06oBt +++ rm /tmp/tmp.aZtcutKury /tmp/tmp.45CvC06oBt +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-29349 ++ mktemp + local LAST_OUT=/tmp/tmp.h3W2yqohbZ ++ mktemp + local LAST_ERR=/tmp/tmp.zhWFJfweyu + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-29349 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-29349 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-29349 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.h3W2yqohbZ + cat /tmp/tmp.zhWFJfweyu error: no matching resources found + rm /tmp/tmp.h3W2yqohbZ /tmp/tmp.zhWFJfweyu + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met 
some-name-pxc-0.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vAIEGgrShY +++ mktemp ++ local LAST_ERR=/tmp/tmp.J5BbMQ0BcA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vAIEGgrShY ++ cat /tmp/tmp.J5BbMQ0BcA Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.vAIEGgrShY /tmp/tmp.J5BbMQ0BcA ++ return 0 + [[ -n '' ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.k1e7VA9AyM +++ mktemp ++ local LAST_ERR=/tmp/tmp.av34H724g7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.k1e7VA9AyM ++ cat /tmp/tmp.av34H724g7 ++ rm /tmp/tmp.k1e7VA9AyM /tmp/tmp.av34H724g7 ++ return 0 + client_pod=pxc-client-64b479df95-s59c5 + wait_pod pxc-client-64b479df95-s59c5 + local pod=pxc-client-64b479df95-s59c5 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-s59c5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-s59c5 condition met pxc-client-64b479df95-s59c5.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TbTAdSgT9I +++ mktemp ++ local LAST_ERR=/tmp/tmp.lYgC1T4OgJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ 
set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TbTAdSgT9I ++ cat /tmp/tmp.lYgC1T4OgJ ++ rm /tmp/tmp.TbTAdSgT9I /tmp/tmp.lYgC1T4OgJ ++ return 0 + client_pod=pxc-client-64b479df95-s59c5 + wait_pod pxc-client-64b479df95-s59c5 + local pod=pxc-client-64b479df95-s59c5 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-s59c5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-s59c5 condition met pxc-client-64b479df95-s59c5.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DH6pj2x8u3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.B1scEGnDKl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DH6pj2x8u3 ++ cat /tmp/tmp.B1scEGnDKl ++ rm /tmp/tmp.DH6pj2x8u3 /tmp/tmp.B1scEGnDKl ++ return 0 + client_pod=pxc-client-64b479df95-s59c5 + wait_pod pxc-client-64b479df95-s59c5 + local pod=pxc-client-64b479df95-s59c5 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-s59c5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-s59c5 condition met pxc-client-64b479df95-s59c5.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.XJynlYwy6y/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-1.sql /tmp/tmp.XJynlYwy6y/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cwQiSg1U19 +++ mktemp ++ local LAST_ERR=/tmp/tmp.s4kF8qNsuz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cwQiSg1U19 ++ cat /tmp/tmp.s4kF8qNsuz ++ rm /tmp/tmp.cwQiSg1U19 /tmp/tmp.s4kF8qNsuz ++ return 0 + client_pod=pxc-client-64b479df95-s59c5 + wait_pod pxc-client-64b479df95-s59c5 + local pod=pxc-client-64b479df95-s59c5 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-s59c5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-s59c5 condition met pxc-client-64b479df95-s59c5.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.XJynlYwy6y/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-1.sql /tmp/tmp.XJynlYwy6y/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CsEcqLISQU +++ mktemp ++ local LAST_ERR=/tmp/tmp.PxrkHhfKvb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CsEcqLISQU ++ cat /tmp/tmp.PxrkHhfKvb ++ rm /tmp/tmp.CsEcqLISQU /tmp/tmp.PxrkHhfKvb ++ return 0 + client_pod=pxc-client-64b479df95-s59c5 + wait_pod pxc-client-64b479df95-s59c5 + local pod=pxc-client-64b479df95-s59c5 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-s59c5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-s59c5 condition met pxc-client-64b479df95-s59c5.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.XJynlYwy6y/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-1.sql /tmp/tmp.XJynlYwy6y/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Yjru6gRXj8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.vAOsGHIZAZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Yjru6gRXj8 ++ cat /tmp/tmp.vAOsGHIZAZ Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.Yjru6gRXj8 /tmp/tmp.vAOsGHIZAZ ++ return 0 + '[' '' ']' + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.5xFY2JahBe ++ mktemp + local LAST_ERR=/tmp/tmp.VlDCPpnYX7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5xFY2JahBe secret/my-cluster-secrets patched + cat /tmp/tmp.VlDCPpnYX7 + rm /tmp/tmp.5xFY2JahBe /tmp/tmp.VlDCPpnYX7 + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nB6w21LRIU +++ mktemp ++ local LAST_ERR=/tmp/tmp.5d2e3GUjNF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nB6w21LRIU ++ cat /tmp/tmp.5d2e3GUjNF ++ rm /tmp/tmp.nB6w21LRIU /tmp/tmp.5d2e3GUjNF ++ return 0 + client_pod=pxc-client-64b479df95-s59c5 + wait_pod pxc-client-64b479df95-s59c5 + local pod=pxc-client-64b479df95-s59c5 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-s59c5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-s59c5 condition met pxc-client-64b479df95-s59c5.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.XJynlYwy6y/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-4.sql /tmp/tmp.XJynlYwy6y/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.8WsTJSZRup ++ mktemp + local LAST_ERR=/tmp/tmp.eRbx5ckpY8 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8WsTJSZRup perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.eRbx5ckpY8 + rm /tmp/tmp.8WsTJSZRup /tmp/tmp.eRbx5ckpY8 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jy2RdTHAY7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.T7H66ebTT8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jy2RdTHAY7 ++ cat /tmp/tmp.T7H66ebTT8 ++ rm /tmp/tmp.jy2RdTHAY7 /tmp/tmp.T7H66ebTT8 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nYP6XzLC6c +++ mktemp ++ local LAST_ERR=/tmp/tmp.SYI6dnyVm0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nYP6XzLC6c ++ cat /tmp/tmp.SYI6dnyVm0 ++ rm /tmp/tmp.nYP6XzLC6c /tmp/tmp.SYI6dnyVm0 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.tjYRFOKT8a ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.SQMYtqofHx +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.tjYRFOKT8a +++++ cat /tmp/tmp.SQMYtqofHx +++++ rm /tmp/tmp.tjYRFOKT8a /tmp/tmp.SQMYtqofHx +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.6BcnBqe7dB ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.C0Jxta1BuE +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.6BcnBqe7dB +++++ cat /tmp/tmp.C0Jxta1BuE +++++ rm /tmp/tmp.6BcnBqe7dB /tmp/tmp.C0Jxta1BuE +++++ return 0 ++++ [[ true == 
\t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XazKSHJHne +++ mktemp ++ local LAST_ERR=/tmp/tmp.gJYNWbPZxM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XazKSHJHne ++ cat /tmp/tmp.gJYNWbPZxM ++ rm /tmp/tmp.XazKSHJHne /tmp/tmp.gJYNWbPZxM ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.zEVxUEWwFl ++ mktemp + local LAST_ERR=/tmp/tmp.OR6oddXa7d + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zEVxUEWwFl secret/my-cluster-secrets patched + cat /tmp/tmp.OR6oddXa7d + rm /tmp/tmp.zEVxUEWwFl /tmp/tmp.OR6oddXa7d + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6GkQ9FVDTX +++ mktemp ++ local LAST_ERR=/tmp/tmp.ftfY1Dq8Py ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6GkQ9FVDTX ++ cat /tmp/tmp.ftfY1Dq8Py ++ rm /tmp/tmp.6GkQ9FVDTX /tmp/tmp.ftfY1Dq8Py ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6yQIK9gCYs +++ mktemp ++ local LAST_ERR=/tmp/tmp.wwiNqruQjR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6yQIK9gCYs ++ cat /tmp/tmp.wwiNqruQjR ++ rm /tmp/tmp.6yQIK9gCYs /tmp/tmp.wwiNqruQjR ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MbBnVXGOiD +++ mktemp ++ local LAST_ERR=/tmp/tmp.nLeQpYiEPY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MbBnVXGOiD ++ cat /tmp/tmp.nLeQpYiEPY ++ rm /tmp/tmp.MbBnVXGOiD /tmp/tmp.nLeQpYiEPY ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local 
LAST_OUT=/tmp/tmp.VQlCBV3LbK ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.piWIk4HqYP +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.VQlCBV3LbK +++++ cat /tmp/tmp.piWIk4HqYP +++++ rm /tmp/tmp.VQlCBV3LbK /tmp/tmp.piWIk4HqYP +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.yNbA8lwDNr ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.OlrdUqKmDc +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.yNbA8lwDNr +++++ cat /tmp/tmp.OlrdUqKmDc +++++ rm /tmp/tmp.yNbA8lwDNr /tmp/tmp.OlrdUqKmDc +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WNdIk4NnB8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.FGaH7JbuY5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WNdIk4NnB8 ++ cat /tmp/tmp.FGaH7JbuY5 ++ rm /tmp/tmp.WNdIk4NnB8 /tmp/tmp.FGaH7JbuY5 ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.XJynlYwy6y/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-2.sql /tmp/tmp.XJynlYwy6y/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.XJynlYwy6y/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-2.sql /tmp/tmp.XJynlYwy6y/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.XJynlYwy6y/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-2.sql /tmp/tmp.XJynlYwy6y/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.SXsLta4HBf ++ mktemp + local LAST_ERR=/tmp/tmp.p8G45JM6Bx + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SXsLta4HBf perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.p8G45JM6Bx + rm /tmp/tmp.SXsLta4HBf /tmp/tmp.p8G45JM6Bx + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.2omemyslN0 ++ mktemp + local LAST_ERR=/tmp/tmp.xUfsR2NPla + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2omemyslN0 secret/my-cluster-secrets patched + cat /tmp/tmp.xUfsR2NPla + rm /tmp/tmp.2omemyslN0 /tmp/tmp.xUfsR2NPla + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hOHhVGGl8c +++ mktemp ++ local LAST_ERR=/tmp/tmp.mwLnWw4W8e ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hOHhVGGl8c ++ cat /tmp/tmp.mwLnWw4W8e ++ rm /tmp/tmp.hOHhVGGl8c /tmp/tmp.mwLnWw4W8e ++ return 0 + [[ 
initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.znrX9nY6tz +++ mktemp ++ local LAST_ERR=/tmp/tmp.6QEVFXuO2F ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.znrX9nY6tz ++ cat /tmp/tmp.6QEVFXuO2F ++ rm /tmp/tmp.znrX9nY6tz /tmp/tmp.6QEVFXuO2F ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oG1W6IEAG0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.AI5cKVTW4v ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oG1W6IEAG0 ++ cat /tmp/tmp.AI5cKVTW4v ++ rm /tmp/tmp.oG1W6IEAG0 /tmp/tmp.AI5cKVTW4v ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QUO6hDRzuo +++ mktemp ++ local LAST_ERR=/tmp/tmp.8qVmrUKHPD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QUO6hDRzuo ++ cat /tmp/tmp.8qVmrUKHPD ++ rm /tmp/tmp.QUO6hDRzuo /tmp/tmp.8qVmrUKHPD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WkxcXQvWEf +++ mktemp ++ local LAST_ERR=/tmp/tmp.oQLXmsbCqc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WkxcXQvWEf ++ cat /tmp/tmp.oQLXmsbCqc ++ rm /tmp/tmp.WkxcXQvWEf /tmp/tmp.oQLXmsbCqc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3UtGjdGyUK +++ mktemp ++ local LAST_ERR=/tmp/tmp.1IXEejmdtu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3UtGjdGyUK ++ cat /tmp/tmp.1IXEejmdtu ++ rm /tmp/tmp.3UtGjdGyUK /tmp/tmp.1IXEejmdtu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CLfIttparq +++ mktemp ++ local LAST_ERR=/tmp/tmp.KvCU8LOmLn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CLfIttparq ++ cat /tmp/tmp.KvCU8LOmLn ++ rm 
/tmp/tmp.CLfIttparq /tmp/tmp.KvCU8LOmLn ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3rpjHerezy +++ mktemp ++ local LAST_ERR=/tmp/tmp.eyPCNnN40D ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3rpjHerezy ++ cat /tmp/tmp.eyPCNnN40D ++ rm /tmp/tmp.3rpjHerezy /tmp/tmp.eyPCNnN40D ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SoM7d5My5W +++ mktemp ++ local LAST_ERR=/tmp/tmp.n0td84QSUP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SoM7d5My5W ++ cat /tmp/tmp.n0td84QSUP ++ rm /tmp/tmp.SoM7d5My5W /tmp/tmp.n0td84QSUP ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.LV36Sdf7hJ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.omkkzDPgxl +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.LV36Sdf7hJ +++++ cat /tmp/tmp.omkkzDPgxl +++++ rm /tmp/tmp.LV36Sdf7hJ /tmp/tmp.omkkzDPgxl +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.UEn04b3Q0x ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ex9mHuPfbO +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.UEn04b3Q0x +++++ cat /tmp/tmp.ex9mHuPfbO +++++ rm /tmp/tmp.UEn04b3Q0x /tmp/tmp.ex9mHuPfbO +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.O3fEt4DwE1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.NGuVf7lciQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.O3fEt4DwE1 ++ cat /tmp/tmp.NGuVf7lciQ ++ rm /tmp/tmp.O3fEt4DwE1 /tmp/tmp.NGuVf7lciQ ++ return 0 + [[ 2 == \2 ]] + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 
-uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' -s /tmp/tmp.XJynlYwy6y/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-3.sql /tmp/tmp.XJynlYwy6y/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.7t9GP2oe1r ++ mktemp + local LAST_ERR=/tmp/tmp.vHlnEaCUTC + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7t9GP2oe1r secret/my-cluster-secrets patched + cat /tmp/tmp.vHlnEaCUTC + rm /tmp/tmp.7t9GP2oe1r /tmp/tmp.vHlnEaCUTC + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.zoMON6WixH +++ mktemp ++ local LAST_ERR=/tmp/tmp.kADrSmdBKm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zoMON6WixH ++ cat /tmp/tmp.kADrSmdBKm ++ rm /tmp/tmp.zoMON6WixH /tmp/tmp.kADrSmdBKm ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + echo 'Skipping dual password feature doesn'\''t work for 5.7. PXC 5.7 doesn'\''t support it!' Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it! 
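[editor's note] The skip above is expected on this image: MySQL's dual-password feature (ALTER USER ... RETAIN CURRENT PASSWORD, added in MySQL 8.0.14) lets the operator keep the old password valid while a new one propagates, and PXC 5.7 has no equivalent, so the check returns immediately. Below is a minimal sketch of wait_for_password_propagation reconstructed from the traced commands; the variable name IMAGE_PXC and the 8.0 polling branch are assumptions, not the suite's exact code:

wait_for_password_propagation() {
    local secret=$1
    local user=$2
    local max_retry=240
    # the root password is read from the secret so the check can log in
    local root_pass
    root_pass=$(getSecretData "$secret" root)
    # IMAGE_PXC is a hypothetical name for the image tag seen in the trace
    if [[ $IMAGE_PXC =~ 5\.7 ]]; then
        echo "Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it!"
        return
    fi
    # assumed 8.0 branch: poll until the rotated password for $user is
    # accepted by the cluster, up to $max_retry attempts
    local i
    for ((i = 0; i < max_retry; i++)); do
        if run_mysql 'SELECT 1;' "-h some-name-pxc -u${user} -p'$(getSecretData "$secret" "$user")'" >/dev/null; then
            return 0
        fi
        sleep 1
    done
    return 1
}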
+ return + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6jYZpiv2ti +++ mktemp ++ local LAST_ERR=/tmp/tmp.GihBevSQvg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6jYZpiv2ti ++ cat /tmp/tmp.GihBevSQvg ++ rm /tmp/tmp.6jYZpiv2ti /tmp/tmp.GihBevSQvg ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IP1c8jeaaA +++ mktemp ++ local LAST_ERR=/tmp/tmp.O1swWVLyZM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IP1c8jeaaA ++ cat /tmp/tmp.O1swWVLyZM ++ rm /tmp/tmp.IP1c8jeaaA /tmp/tmp.O1swWVLyZM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AVMUulyvhb +++ mktemp ++ local LAST_ERR=/tmp/tmp.ReCBsYXOMA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AVMUulyvhb ++ cat /tmp/tmp.ReCBsYXOMA ++ rm /tmp/tmp.AVMUulyvhb /tmp/tmp.ReCBsYXOMA ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oJzlBL75V7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Rz3mzmpQY3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oJzlBL75V7 ++ cat /tmp/tmp.Rz3mzmpQY3 ++ rm /tmp/tmp.oJzlBL75V7 /tmp/tmp.Rz3mzmpQY3 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Epp4sOCVKk +++ mktemp ++ local LAST_ERR=/tmp/tmp.Jknt7n8MoB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Epp4sOCVKk ++ cat /tmp/tmp.Jknt7n8MoB ++ rm /tmp/tmp.Epp4sOCVKk /tmp/tmp.Jknt7n8MoB ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.KD56woG0Ay ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.BsC1XpuQnd +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i 
in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.KD56woG0Ay +++++ cat /tmp/tmp.BsC1XpuQnd +++++ rm /tmp/tmp.KD56woG0Ay /tmp/tmp.BsC1XpuQnd +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.lsoyYCyBuM ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.7UW4m9LdiH +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.lsoyYCyBuM +++++ cat /tmp/tmp.7UW4m9LdiH +++++ rm /tmp/tmp.lsoyYCyBuM /tmp/tmp.7UW4m9LdiH +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2d2pm7bEa7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.fCcCwlFQ9G ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2d2pm7bEa7 ++ cat /tmp/tmp.fCcCwlFQ9G ++ rm /tmp/tmp.2d2pm7bEa7 /tmp/tmp.fCcCwlFQ9G ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.v6aqZxBNiP +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZCeFdVcEY6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.v6aqZxBNiP ++ cat /tmp/tmp.ZCeFdVcEY6 ++ rm /tmp/tmp.v6aqZxBNiP /tmp/tmp.ZCeFdVcEY6 ++ return 0 + client_pod=pxc-client-64b479df95-s59c5 + wait_pod pxc-client-64b479df95-s59c5 + local pod=pxc-client-64b479df95-s59c5 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-s59c5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-s59c5 condition met pxc-client-64b479df95-s59c5.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.XJynlYwy6y/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-4.sql /tmp/tmp.XJynlYwy6y/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Y9tzHGXqUB ++ mktemp + local LAST_ERR=/tmp/tmp.Yf083w3phE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Y9tzHGXqUB secret/my-cluster-secrets patched + cat /tmp/tmp.Yf083w3phE + rm /tmp/tmp.Y9tzHGXqUB /tmp/tmp.Yf083w3phE + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0mg96iSdZ5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.uPEVjE9JSX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0mg96iSdZ5 ++ cat /tmp/tmp.uPEVjE9JSX ++ rm /tmp/tmp.0mg96iSdZ5 /tmp/tmp.uPEVjE9JSX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SDl1PklMSe +++ mktemp ++ local LAST_ERR=/tmp/tmp.yTe8eVMjhZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SDl1PklMSe ++ cat /tmp/tmp.yTe8eVMjhZ ++ rm /tmp/tmp.SDl1PklMSe /tmp/tmp.yTe8eVMjhZ ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X3a0hkxpBU +++ mktemp ++ local LAST_ERR=/tmp/tmp.GsYidIwIDv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.X3a0hkxpBU ++ cat /tmp/tmp.GsYidIwIDv ++ rm /tmp/tmp.X3a0hkxpBU /tmp/tmp.GsYidIwIDv ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.eiiHWfRI5v ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.blUns7OSCy +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ 
exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.eiiHWfRI5v +++++ cat /tmp/tmp.blUns7OSCy +++++ rm /tmp/tmp.eiiHWfRI5v /tmp/tmp.blUns7OSCy +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.QJI9toMLYA ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.FwkBdljsoT +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.QJI9toMLYA +++++ cat /tmp/tmp.FwkBdljsoT +++++ rm /tmp/tmp.QJI9toMLYA /tmp/tmp.FwkBdljsoT +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ih7qaKvqMb +++ mktemp ++ local LAST_ERR=/tmp/tmp.nx96F07QeC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ih7qaKvqMb ++ cat /tmp/tmp.nx96F07QeC ++ rm /tmp/tmp.ih7qaKvqMb /tmp/tmp.nx96F07QeC ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sa0GmAq8Ik +++ mktemp ++ local LAST_ERR=/tmp/tmp.EF35qCFbKf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sa0GmAq8Ik ++ cat /tmp/tmp.EF35qCFbKf ++ rm /tmp/tmp.sa0GmAq8Ik /tmp/tmp.EF35qCFbKf ++ return 0 + client_pod=pxc-client-64b479df95-s59c5 + wait_pod pxc-client-64b479df95-s59c5 + local pod=pxc-client-64b479df95-s59c5 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-s59c5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-s59c5 condition met pxc-client-64b479df95-s59c5.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.XJynlYwy6y/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-4.sql /tmp/tmp.XJynlYwy6y/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.vNUWKcc4h9 ++ mktemp + local LAST_ERR=/tmp/tmp.Hlmi9coQtP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.vNUWKcc4h9 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.Hlmi9coQtP + rm /tmp/tmp.vNUWKcc4h9 /tmp/tmp.Hlmi9coQtP + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.u162Q7nA2P +++ mktemp ++ local LAST_ERR=/tmp/tmp.ejG1KeVzYN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.u162Q7nA2P ++ cat /tmp/tmp.ejG1KeVzYN ++ rm /tmp/tmp.u162Q7nA2P /tmp/tmp.ejG1KeVzYN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.720ewHAM8q +++ mktemp ++ local LAST_ERR=/tmp/tmp.Pyf9zs82L2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.720ewHAM8q ++ cat /tmp/tmp.Pyf9zs82L2 ++ rm /tmp/tmp.720ewHAM8q /tmp/tmp.Pyf9zs82L2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hfRPRQUf2T +++ mktemp ++ local LAST_ERR=/tmp/tmp.w91ARYaNXD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hfRPRQUf2T ++ cat /tmp/tmp.w91ARYaNXD ++ rm /tmp/tmp.hfRPRQUf2T /tmp/tmp.w91ARYaNXD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZCMb0CHzZg +++ mktemp ++ local LAST_ERR=/tmp/tmp.JJeAg59OBw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break 
++ cat /tmp/tmp.ZCMb0CHzZg ++ cat /tmp/tmp.JJeAg59OBw ++ rm /tmp/tmp.ZCMb0CHzZg /tmp/tmp.JJeAg59OBw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MaJmVU1gX9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.DoNNkf7YVn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MaJmVU1gX9 ++ cat /tmp/tmp.DoNNkf7YVn ++ rm /tmp/tmp.MaJmVU1gX9 /tmp/tmp.DoNNkf7YVn ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TPaGI3QKZp +++ mktemp ++ local LAST_ERR=/tmp/tmp.WVlvLuMR0s ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TPaGI3QKZp ++ cat /tmp/tmp.WVlvLuMR0s ++ rm /tmp/tmp.TPaGI3QKZp /tmp/tmp.WVlvLuMR0s ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.03nhueQbTO +++ mktemp ++ local LAST_ERR=/tmp/tmp.JIIst43Cgf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.03nhueQbTO ++ cat /tmp/tmp.JIIst43Cgf ++ rm /tmp/tmp.03nhueQbTO /tmp/tmp.JIIst43Cgf ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.88munQvhIo +++ mktemp ++ local LAST_ERR=/tmp/tmp.y830INIFya ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.88munQvhIo ++ cat /tmp/tmp.y830INIFya ++ rm /tmp/tmp.88munQvhIo /tmp/tmp.y830INIFya ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.d8PEtVsQvQ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.jsJVQLF4vw +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.d8PEtVsQvQ +++++ cat /tmp/tmp.jsJVQLF4vw +++++ rm /tmp/tmp.d8PEtVsQvQ /tmp/tmp.jsJVQLF4vw +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.g11GgPP1Au ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.LhYt3ayTSu +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.g11GgPP1Au +++++ cat 
/tmp/tmp.LhYt3ayTSu +++++ rm /tmp/tmp.g11GgPP1Au /tmp/tmp.LhYt3ayTSu +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RqUsDdPXqb +++ mktemp ++ local LAST_ERR=/tmp/tmp.c4hn5eFlUe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RqUsDdPXqb ++ cat /tmp/tmp.c4hn5eFlUe ++ rm /tmp/tmp.RqUsDdPXqb /tmp/tmp.c4hn5eFlUe ++ return 0 + [[ 2 == \2 ]] + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.jseaw2aiOC ++ mktemp + local LAST_ERR=/tmp/tmp.zbqQAASQBZ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jseaw2aiOC secret/my-cluster-secrets-2 patched + cat /tmp/tmp.zbqQAASQBZ + rm /tmp/tmp.jseaw2aiOC /tmp/tmp.zbqQAASQBZ + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.m1msg8KNBQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.clqltILPwO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.m1msg8KNBQ ++ cat /tmp/tmp.clqltILPwO ++ rm /tmp/tmp.m1msg8KNBQ /tmp/tmp.clqltILPwO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KwfnLdcDzn +++ mktemp ++ local LAST_ERR=/tmp/tmp.pmz9iFX7Yn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KwfnLdcDzn ++ cat /tmp/tmp.pmz9iFX7Yn ++ rm /tmp/tmp.KwfnLdcDzn /tmp/tmp.pmz9iFX7Yn ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YReFZpeKE5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.vQTd8mxJ5G ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ 
exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YReFZpeKE5 ++ cat /tmp/tmp.vQTd8mxJ5G ++ rm /tmp/tmp.YReFZpeKE5 /tmp/tmp.vQTd8mxJ5G ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.5RbAAm2BoL ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.WXvrCzBj01 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.5RbAAm2BoL +++++ cat /tmp/tmp.WXvrCzBj01 +++++ rm /tmp/tmp.5RbAAm2BoL /tmp/tmp.WXvrCzBj01 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.QwjJcRAvx4 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.7ybQM7qIw5 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.QwjJcRAvx4 +++++ cat /tmp/tmp.7ybQM7qIw5 +++++ rm /tmp/tmp.QwjJcRAvx4 /tmp/tmp.7ybQM7qIw5 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iMzBShgUdj +++ mktemp ++ local LAST_ERR=/tmp/tmp.WHMvkoqvSQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iMzBShgUdj ++ cat /tmp/tmp.WHMvkoqvSQ ++ rm /tmp/tmp.iMzBShgUdj /tmp/tmp.WHMvkoqvSQ ++ return 0 + [[ 2 == \2 ]] + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JgonBqyoHO +++ mktemp ++ local LAST_ERR=/tmp/tmp.v78h29Xwsd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JgonBqyoHO ++ cat /tmp/tmp.v78h29Xwsd ++ rm /tmp/tmp.JgonBqyoHO /tmp/tmp.v78h29Xwsd ++ return 0 + client_pod=pxc-client-64b479df95-s59c5 + wait_pod pxc-client-64b479df95-s59c5 + local pod=pxc-client-64b479df95-s59c5 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-s59c5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-s59c5 condition met 
pxc-client-64b479df95-s59c5.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.XJynlYwy6y/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-4.sql /tmp/tmp.XJynlYwy6y/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.ir8UhnS30T +++ mktemp ++ local LAST_ERR=/tmp/tmp.Y1ymKZPaWY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ir8UhnS30T ++ cat /tmp/tmp.Y1ymKZPaWY ++ rm /tmp/tmp.ir8UhnS30T /tmp/tmp.Y1ymKZPaWY ++ return 0 + newpass='5D}8H2}?I)j!xaim' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''5D}8H2}?I)j!xaim'\'';' '-h some-name-pxc -uroot -p'\''5D}8H2}?I)j!xaim'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''5D}8H2}?I)j!xaim'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''5D}8H2}?I)j!xaim'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DCu2nAtkZg +++ mktemp ++ local LAST_ERR=/tmp/tmp.NkYJtOeimp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DCu2nAtkZg ++ cat /tmp/tmp.NkYJtOeimp ++ rm /tmp/tmp.DCu2nAtkZg /tmp/tmp.NkYJtOeimp ++ return 0 + client_pod=pxc-client-64b479df95-s59c5 + wait_pod pxc-client-64b479df95-s59c5 + local pod=pxc-client-64b479df95-s59c5 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-s59c5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-s59c5 condition met pxc-client-64b479df95-s59c5.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''5D}8H2}?I)j!xaim'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''5D}8H2}?I)j!xaim'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''5D}8H2}?I)j!xaim'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''5D}8H2}?I)j!xaim'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.c4HZ6yodYv +++ mktemp ++ local LAST_ERR=/tmp/tmp.SLDZgBOJA8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.c4HZ6yodYv ++ cat /tmp/tmp.SLDZgBOJA8 ++ rm /tmp/tmp.c4HZ6yodYv /tmp/tmp.SLDZgBOJA8 ++ return 0 + client_pod=pxc-client-64b479df95-s59c5 + 
wait_pod pxc-client-64b479df95-s59c5 + local pod=pxc-client-64b479df95-s59c5 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-s59c5 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-64b479df95-s59c5 condition met pxc-client-64b479df95-s59c5.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.XJynlYwy6y/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-4.sql /tmp/tmp.XJynlYwy6y/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.AJegNhUM9b +++ mktemp ++ local LAST_ERR=/tmp/tmp.vSWuyvhxg1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AJegNhUM9b ++ cat /tmp/tmp.vSWuyvhxg1 ++ rm /tmp/tmp.AJegNhUM9b /tmp/tmp.vSWuyvhxg1 ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.fgFkS8Q0CR ++ mktemp + local LAST_ERR=/tmp/tmp.1ZrIZ5jmNZ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.fgFkS8Q0CR secret/my-cluster-secrets-2 configured + cat /tmp/tmp.1ZrIZ5jmNZ Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
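[editor's note] The warning above is benign: my-cluster-secrets-2 was first created without the kubectl.kubernetes.io/last-applied-configuration annotation that `kubectl apply` uses for its three-way merge, so kubectl patches the annotation in automatically and continues. Creating the object with --save-config, or managing it with apply from the start, avoids the warning; a short illustration using the same manifest path as the test:

# create with the annotation recorded up front
kubectl create --save-config -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/conf/secrets.yml
# subsequent declarative updates then apply cleanly, without the warning
kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/conf/secrets.yml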
+ rm /tmp/tmp.fgFkS8Q0CR /tmp/tmp.1ZrIZ5jmNZ + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Tj1KL2gPq4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.bn5pXxlQyr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Tj1KL2gPq4 ++ cat /tmp/tmp.bn5pXxlQyr ++ rm /tmp/tmp.Tj1KL2gPq4 /tmp/tmp.bn5pXxlQyr ++ return 0 + client_pod=pxc-client-64b479df95-s59c5 + wait_pod pxc-client-64b479df95-s59c5 + local pod=pxc-client-64b479df95-s59c5 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-s59c5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-s59c5 condition met pxc-client-64b479df95-s59c5.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.XJynlYwy6y/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-4.sql /tmp/tmp.XJynlYwy6y/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/conf/some-name.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_OUT=/tmp/tmp.7L4SFKBtHf + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1736-1898f674#' ++ mktemp + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + local LAST_ERR=/tmp/tmp.f5DiuWeENE + local exit_status=0 + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-29349~ ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7L4SFKBtHf 
perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.f5DiuWeENE + rm /tmp/tmp.7L4SFKBtHf /tmp/tmp.f5DiuWeENE + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.C1Ppa1HfOT +++ mktemp ++ local LAST_ERR=/tmp/tmp.n2Gois4LCa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.C1Ppa1HfOT ++ cat /tmp/tmp.n2Gois4LCa ++ rm /tmp/tmp.C1Ppa1HfOT /tmp/tmp.n2Gois4LCa ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YQ3PXaoocH +++ mktemp ++ local LAST_ERR=/tmp/tmp.eOGBJBA55D ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YQ3PXaoocH ++ cat /tmp/tmp.eOGBJBA55D ++ rm /tmp/tmp.YQ3PXaoocH /tmp/tmp.eOGBJBA55D ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3ppdBgMbWZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.Gg2sKgOhPo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3ppdBgMbWZ ++ cat /tmp/tmp.Gg2sKgOhPo ++ rm /tmp/tmp.3ppdBgMbWZ /tmp/tmp.Gg2sKgOhPo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0EzGyKobbR +++ mktemp ++ local LAST_ERR=/tmp/tmp.5EMmHZINgC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0EzGyKobbR ++ cat /tmp/tmp.5EMmHZINgC ++ rm /tmp/tmp.0EzGyKobbR /tmp/tmp.5EMmHZINgC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hvfIAIQxXo +++ mktemp ++ local LAST_ERR=/tmp/tmp.mVVdnG2V1e ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hvfIAIQxXo ++ cat /tmp/tmp.mVVdnG2V1e ++ rm /tmp/tmp.hvfIAIQxXo /tmp/tmp.mVVdnG2V1e ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 
]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RuXthZw46a +++ mktemp ++ local LAST_ERR=/tmp/tmp.L60jJ6Z1c7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RuXthZw46a ++ cat /tmp/tmp.L60jJ6Z1c7 ++ rm /tmp/tmp.RuXthZw46a /tmp/tmp.L60jJ6Z1c7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2ZuydqwcBc +++ mktemp ++ local LAST_ERR=/tmp/tmp.lxK9SxXzkD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2ZuydqwcBc ++ cat /tmp/tmp.lxK9SxXzkD ++ rm /tmp/tmp.2ZuydqwcBc /tmp/tmp.lxK9SxXzkD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Gk2HFoBVL6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.bB0BUhukvX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Gk2HFoBVL6 ++ cat /tmp/tmp.bB0BUhukvX ++ rm /tmp/tmp.Gk2HFoBVL6 /tmp/tmp.bB0BUhukvX ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LMnq4qw5qd +++ mktemp ++ local LAST_ERR=/tmp/tmp.Z2i6m7LnwN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LMnq4qw5qd ++ cat /tmp/tmp.Z2i6m7LnwN ++ rm /tmp/tmp.LMnq4qw5qd /tmp/tmp.Z2i6m7LnwN ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.tjR5smXKbm ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.fCp0wrbECI +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.tjR5smXKbm +++++ cat /tmp/tmp.fCp0wrbECI +++++ rm /tmp/tmp.tjR5smXKbm /tmp/tmp.fCp0wrbECI +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wlib4mqQWS +++ mktemp ++ local LAST_ERR=/tmp/tmp.id370jq4HK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wlib4mqQWS ++ cat /tmp/tmp.id370jq4HK ++ rm /tmp/tmp.wlib4mqQWS /tmp/tmp.id370jq4HK ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + 
local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.RziTuGKrGv ++ mktemp + local LAST_ERR=/tmp/tmp.0TM5md6eOg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RziTuGKrGv secret/my-cluster-secrets patched + cat /tmp/tmp.0TM5md6eOg + rm /tmp/tmp.RziTuGKrGv /tmp/tmp.0TM5md6eOg + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SAni00oxV6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.F6lmLljmhF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SAni00oxV6 ++ cat /tmp/tmp.F6lmLljmhF ++ rm /tmp/tmp.SAni00oxV6 /tmp/tmp.F6lmLljmhF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pOPRoJ9YcX +++ mktemp ++ local LAST_ERR=/tmp/tmp.gTgx6zPQ2z ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pOPRoJ9YcX ++ cat /tmp/tmp.gTgx6zPQ2z ++ rm /tmp/tmp.pOPRoJ9YcX /tmp/tmp.gTgx6zPQ2z ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uHxNj0SFNs +++ mktemp ++ local LAST_ERR=/tmp/tmp.Y4VIDPDGQR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uHxNj0SFNs ++ cat /tmp/tmp.Y4VIDPDGQR ++ rm /tmp/tmp.uHxNj0SFNs /tmp/tmp.Y4VIDPDGQR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WEELhsBpW7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.g6BAT0GNu9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WEELhsBpW7 ++ cat /tmp/tmp.g6BAT0GNu9 ++ rm /tmp/tmp.WEELhsBpW7 /tmp/tmp.g6BAT0GNu9 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9QadvYWky2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.NUhDcG2beS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl 
get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9QadvYWky2 ++ cat /tmp/tmp.NUhDcG2beS ++ rm /tmp/tmp.9QadvYWky2 /tmp/tmp.NUhDcG2beS ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.jPHfE4TwtE ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.JGPx6te9H4 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.jPHfE4TwtE +++++ cat /tmp/tmp.JGPx6te9H4 +++++ rm /tmp/tmp.jPHfE4TwtE /tmp/tmp.JGPx6te9H4 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qALshcIWIn +++ mktemp ++ local LAST_ERR=/tmp/tmp.v41dcaYMrB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qALshcIWIn ++ cat /tmp/tmp.v41dcaYMrB ++ rm /tmp/tmp.qALshcIWIn /tmp/tmp.v41dcaYMrB ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KAqZYK05GR +++ mktemp ++ local LAST_ERR=/tmp/tmp.L3SavVNzyd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KAqZYK05GR ++ cat /tmp/tmp.L3SavVNzyd ++ rm /tmp/tmp.KAqZYK05GR /tmp/tmp.L3SavVNzyd ++ return 0 + client_pod=pxc-client-64b479df95-s59c5 + wait_pod pxc-client-64b479df95-s59c5 + local pod=pxc-client-64b479df95-s59c5 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-s59c5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-s59c5 condition met pxc-client-64b479df95-s59c5.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.XJynlYwy6y/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1736/e2e-tests/users/compare/select-3.sql /tmp/tmp.XJynlYwy6y/select-3.sql + destroy users-29349 + local namespace=users-29349 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v 'the object has been modified' + grep -v level=info + grep -v 'get backup status: Job.batch' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + sort -u + tee /tmp/tmp.XJynlYwy6y/operator.log +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.H5q4epkIEm +++ mktemp ++ local LAST_ERR=/tmp/tmp.Wk2GygcQKc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.H5q4epkIEm ++ cat /tmp/tmp.Wk2GygcQKc ++ rm /tmp/tmp.H5q4epkIEm /tmp/tmp.Wk2GygcQKc ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-6b9f484f66-rtddm ++ mktemp + local LAST_OUT=/tmp/tmp.q1DThwGzx9 ++ mktemp + local LAST_ERR=/tmp/tmp.usG32ob07l + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-6b9f484f66-rtddm + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.q1DThwGzx9 + cat /tmp/tmp.usG32ob07l + rm /tmp/tmp.q1DThwGzx9 /tmp/tmp.usG32ob07l + return 0 2024-07-01T09:02:11.371Z INFO setup Manager starting up {"gitCommit": "1898f6745d34f9811ebeaf4860ddc610123bcd1a", "gitBranch": "PR-1736-1898f674", "buildTime": "2024-07-01T07:09:06Z", "goVersion": "go1.22.4", "os": "linux", "arch": "amd64"} 2024-07-01T09:02:11.371Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1469000"} 2024-07-01T09:02:11.372Z INFO setup Registering Components. 2024-07-01T09:02:16.971Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2024-07-01T09:02:16.975Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2024-07-01T09:02:16.975Z INFO controller-runtime.metrics Starting metrics server 2024-07-01T09:02:16.975Z INFO controller-runtime.webhook Starting webhook server 2024-07-01T09:02:16.975Z INFO setup Starting the Cmd. 2024-07-01T09:02:16.975Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2024-07-01T09:02:16.976Z INFO controller-runtime.certwatcher Starting certificate watcher 2024-07-01T09:02:16.976Z INFO controller-runtime.certwatcher Updated current TLS certificate 2024-07-01T09:02:16.976Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2024-07-01T09:02:17.077Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 
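[editor's note] The records from here on are the operator log as captured by the destroy step. Reassembled from the traced commands above, the scrubbing pipeline reads roughly as follows; it drops transient reconcile noise, strips the numeric "ts" fields so repeated records collapse under sort -u, and saves the result:

kubectl logs -n pxc-operator percona-xtradb-cluster-operator-6b9f484f66-rtddm \
    | grep -v 'the object has been modified' \
    | grep -v 'get backup status: Job.batch' \
    | grep -v level=info \
    | /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \
    | sort -u \
    | tee /tmp/tmp.XJynlYwy6y/operator.log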
2024-07-01T09:02:17.096Z DEBUG events percona-xtradb-cluster-operator-6b9f484f66-rtddm_3c9a378f-de4c-4cb4-b83d-a9f1f4f945e8 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"6bab93e7-149f-45be-819a-6e5be2251bb9","apiVersion":"coordination.k8s.io/v1","resourceVersion":"61802"}, "reason": "LeaderElection"}
2024-07-01T09:02:17.096Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com
2024-07-01T09:02:17.097Z INFO Starting Controller {"controller": "pxcbackup-controller"}
2024-07-01T09:02:17.097Z INFO Starting Controller {"controller": "pxc-controller"}
2024-07-01T09:02:17.097Z INFO Starting Controller {"controller": "pxcrestore-controller"}
2024-07-01T09:02:17.097Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"}
2024-07-01T09:02:17.097Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"}
2024-07-01T09:02:17.097Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"}
2024-07-01T09:02:17.208Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1}
2024-07-01T09:02:17.208Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1}
2024-07-01T09:02:17.208Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1}
2024-07-01T09:02:47.175Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "e06836f8-44c2-45b4-ac4e-e749b28891ce", "version": "1.15.0"}
2024-07-01T09:04:02.579Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "7e5cc100-5c20-4b27-b425-5d45bce9d9a9", "user": "operator"}
2024-07-01T09:04:02.610Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "7e5cc100-5c20-4b27-b425-5d45bce9d9a9", "user": "monitor"}
2024-07-01T09:04:02.655Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "7e5cc100-5c20-4b27-b425-5d45bce9d9a9"}
2024-07-01T09:04:02.699Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "7e5cc100-5c20-4b27-b425-5d45bce9d9a9", "user": "xtrabackup"}
2024-07-01T09:04:02.735Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "7e5cc100-5c20-4b27-b425-5d45bce9d9a9"}
2024-07-01T09:04:02.872Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "7e5cc100-5c20-4b27-b425-5d45bce9d9a9", "err": "get primary pxc pod: not found"}
2024-07-01T09:04:07.663Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "a5b5abd8-1823-4db7-8d2e-27472224f7ed", "err": "get primary pxc pod: not found"}
2024-07-01T09:04:12.854Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "50813877-31b6-421f-9a85-0d6effe6ad52", "err": "get primary pxc pod: not found"}
2024-07-01T09:04:18.046Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "0e78d02b-b18e-4125-8cf3-52fb6196fadd", "err": "get primary pxc pod: not found"}
2024-07-01T09:06:25.713Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "eeeb0221-9b4f-45da-9c29-456595dda4de", "user": "root"}
2024-07-01T09:06:25.756Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "eeeb0221-9b4f-45da-9c29-456595dda4de", "user": "replication"}
2024-07-01T09:06:25.993Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "eeeb0221-9b4f-45da-9c29-456595dda4de", "new version": "5.7.44-48-57"}
2024-07-01T09:06:29.816Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "eeeb0221-9b4f-45da-9c29-456595dda4de"}
2024-07-01T09:06:34.420Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "73f3c4b8-d60c-4646-9949-b335a872e290"}
2024-07-01T09:06:39.788Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "8239f431-0065-4756-a771-c6e04246b677"}
2024-07-01T09:06:45.372Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "1f9fc96c-55d2-482d-bc16-7efb202ab757"}
2024-07-01T09:06:50.707Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "9964d54b-c111-4d66-9f16-fa380a62ff0c"}
2024-07-01T09:06:55.777Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "eef9ceb5-7eba-4de4-b500-e6ea81d057bc"}
2024-07-01T09:07:00.892Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "0ff11f19-c8e2-4660-9952-66f8929abc7b"}
2024-07-01T09:07:06.370Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "de01417d-eada-473e-a821-c6a389894421"}
2024-07-01T09:07:11.589Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "ff497eef-2f53-42e4-9c9d-4854373cda8e"}
2024-07-01T09:07:17.184Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "61a3c07c-49c6-4c14-8ec0-70582177ed9d"}
2024-07-01T09:07:22.575Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "2cf29438-7dd5-4a13-9277-f54c11ce6bca"}
2024-07-01T09:07:27.796Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "1e09fd9f-cf31-4a55-b2a9-0239d220e400"}
2024-07-01T09:07:33.075Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "cfc6992a-098a-47f1-9a9c-7cab6241aa72"}
2024-07-01T09:07:34.405Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "10598d18-db9d-4dfd-8e4c-73300d92af11", "user": "root"}
2024-07-01T09:07:34.431Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "10598d18-db9d-4dfd-8e4c-73300d92af11", "user": "root"}
2024-07-01T09:07:34.439Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "10598d18-db9d-4dfd-8e4c-73300d92af11", "secret": "some-name-mysql-init", "user": "root"}
2024-07-01T09:07:40.371Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "10598d18-db9d-4dfd-8e4c-73300d92af11"}
2024-07-01T09:07:40.382Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "10598d18-db9d-4dfd-8e4c-73300d92af11", "user": "root"}
2024-07-01T09:07:44.312Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "10598d18-db9d-4dfd-8e4c-73300d92af11"}
2024-07-01T09:07:49.691Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "e3c2124c-de2a-4041-81e5-f4b508dd81f6"}
2024-07-01T09:07:54.974Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "83372cfa-05d5-4f02-aadf-68a5900ecf87"}
2024-07-01T09:08:11.769Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "29891037-eb7e-4af8-b448-5aefef1e1432", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-07-01T09:08:16.759Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "19106def-64e3-443b-a736-80e0157db448", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-07-01T09:08:22.022Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "b8c5f218-dbc4-4536-8dd5-c1233dfb8613", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-07-01T09:08:22.737Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "f9c89515-d289-4502-9687-1144ece1a03d", "user": "proxyadmin"}
2024-07-01T09:08:22.737Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "f9c89515-d289-4502-9687-1144ece1a03d", "user": "proxyadmin"}
2024-07-01T09:08:22.799Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "f9c89515-d289-4502-9687-1144ece1a03d", "user": "proxyadmin"}
2024-07-01T09:08:22.808Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "f9c89515-d289-4502-9687-1144ece1a03d", "user": "proxyadmin"}
2024-07-01T09:08:22.808Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "f9c89515-d289-4502-9687-1144ece1a03d", "last-applied-secret": "1a55867d0f4fa0d0643653b2eb57b2f690315d8d843a10599c9839c2a779a95f"}
2024-07-01T09:08:23.090Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "f9c89515-d289-4502-9687-1144ece1a03d", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-07-01T09:08:55.175Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "f24fe885-2bb4-4e3f-ab3a-d5cdc22e336e", "err": "get primary pxc pod: not found"}
2024-07-01T09:09:07.099Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "e0585db9-a746-4a6f-b23f-9176b1847e7d"}
2024-07-01T09:09:13.198Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "e5a8c649-e319-4c46-8d7d-15fcf922e040"}
2024-07-01T09:09:15.546Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "44df77af-5443-4131-9064-e549ffd90883", "user": "xtrabackup"}
2024-07-01T09:09:15.569Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "44df77af-5443-4131-9064-e549ffd90883", "user": "xtrabackup"}
2024-07-01T09:09:15.587Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "44df77af-5443-4131-9064-e549ffd90883", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2024-07-01T09:09:15.599Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "44df77af-5443-4131-9064-e549ffd90883", "user": "xtrabackup"}
2024-07-01T09:09:15.600Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "44df77af-5443-4131-9064-e549ffd90883", "last-applied-secret": "d11bcc70ca53ea410cd455e13d82e7225917b0861d9b677ed34c02c5f6fbaf1b"}
2024-07-01T09:09:22.009Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "44df77af-5443-4131-9064-e549ffd90883"}
2024-07-01T09:11:12.945Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "e0c62def-1965-470c-ad38-5d72f93f9724", "primary name": "some-name-pxc-0.some-name-pxc.users-29349.svc.cluster.local"}
2024-07-01T09:11:23.367Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "4387d6d9-755a-4a83-bdcb-fc4e658acc72", "primary name": "some-name-pxc-0.some-name-pxc.users-29349.svc.cluster.local"}
2024-07-01T09:11:47.593Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "250ff716-74be-40c0-8c29-4631016d96d4"}
2024-07-01T09:11:52.588Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "aa218e5d-cf6c-4270-a125-90d3774c51f3"}
2024-07-01T09:11:57.900Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "ec02aa0f-f8cf-49c8-9f35-c46003c81d1f"}
2024-07-01T09:12:04.080Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "83c5a34f-32da-495a-a013-5065f1339a29"}
2024-07-01T09:12:05.995Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "00ba8ce2-9699-4846-8fad-324141d117a4", "user": "monitor"}
2024-07-01T09:12:06.019Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "00ba8ce2-9699-4846-8fad-324141d117a4", "user": "monitor"}
2024-07-01T09:12:06.027Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "00ba8ce2-9699-4846-8fad-324141d117a4", "secret": "some-name-mysql-init", "user": "monitor"}
2024-07-01T09:12:06.058Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "00ba8ce2-9699-4846-8fad-324141d117a4", "user": "monitor"}
2024-07-01T09:12:06.069Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "00ba8ce2-9699-4846-8fad-324141d117a4", "user": "monitor"}
2024-07-01T09:12:06.069Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "00ba8ce2-9699-4846-8fad-324141d117a4", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"}
2024-07-01T09:12:08.877Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "00ba8ce2-9699-4846-8fad-324141d117a4", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-07-01T09:12:55.472Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "fc49cb17-3e37-43c0-a9b5-2a1b01090d43"}
2024-07-01T09:13:00.322Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "2c0b2427-113b-443a-91ce-275cdc96da43"}
2024-07-01T09:13:05.721Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "15f08f09-d125-441a-b0b0-bfa2f3649b97"}
2024-07-01T09:13:10.981Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d2691c16-eb01-4939-94f5-e7355e1f6085"}
2024-07-01T09:13:16.084Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "ef7929eb-a9ed-4ce6-b627-97a074be4e47"}
2024-07-01T09:13:21.707Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "a26e8503-8db4-4306-90a5-97989f5ee5d3"}
2024-07-01T09:13:26.700Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "b84a9827-a874-4bba-afad-d4e8adb61b63"}
2024-07-01T09:13:32.097Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "864650ef-19d6-48f2-bf80-3dd8a93c7df6"}
2024-07-01T09:13:33.720Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "6ae8bbbd-e85b-4ab3-810f-11fe0f02f5fc", "user": "operator"}
2024-07-01T09:13:33.736Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "6ae8bbbd-e85b-4ab3-810f-11fe0f02f5fc", "user": "operator"}
2024-07-01T09:13:33.744Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "6ae8bbbd-e85b-4ab3-810f-11fe0f02f5fc", "secret": "some-name-mysql-init", "user": "operator"}
2024-07-01T09:13:33.753Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "6ae8bbbd-e85b-4ab3-810f-11fe0f02f5fc", "user": "operator"}
2024-07-01T09:13:33.753Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "6ae8bbbd-e85b-4ab3-810f-11fe0f02f5fc", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"}
2024-07-01T09:13:35.162Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "6ae8bbbd-e85b-4ab3-810f-11fe0f02f5fc", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29349.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29349.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29349.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29349.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29349.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29349.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-07-01T09:14:12.523Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "0e728e19-eab5-4d2d-a6f4-eefb84970321"}
2024-07-01T09:14:20.271Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "041e4cea-3edd-45ce-8a94-fc314d30c1c0"}
2024-07-01T09:14:25.703Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "723c8101-dc32-4129-aa57-4b102c830e9a"}
2024-07-01T09:14:30.683Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "secrets": "my-cluster-secrets-2"}
2024-07-01T09:14:30.684Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "user": "root"}
2024-07-01T09:14:30.709Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "user": "root"}
2024-07-01T09:14:30.721Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "secret": "some-name-mysql-init", "user": "root"}
2024-07-01T09:14:32.288Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "c39a371b-7fc3-4509-8bb2-af9ef83ad8ab"}
2024-07-01T09:14:35.191Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4"}
2024-07-01T09:14:35.211Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "user": "root"}
2024-07-01T09:14:35.211Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "user": "operator"}
2024-07-01T09:14:35.235Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "user": "operator"}
2024-07-01T09:14:35.256Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "secret": "some-name-mysql-init", "user": "operator"}
2024-07-01T09:14:35.284Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "user": "operator"}
2024-07-01T09:14:35.284Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "user": "monitor"}
2024-07-01T09:14:35.304Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "user": "monitor"}
2024-07-01T09:14:35.342Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "secret": "some-name-mysql-init", "user": "monitor"}
2024-07-01T09:14:35.375Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "user": "monitor"}
2024-07-01T09:14:35.393Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "user": "monitor"}
2024-07-01T09:14:35.393Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "user": "xtrabackup"}
2024-07-01T09:14:35.413Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "user": "xtrabackup"}
2024-07-01T09:14:35.422Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2024-07-01T09:14:35.433Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "user": "xtrabackup"}
2024-07-01T09:14:35.433Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "user": "replication"}
2024-07-01T09:14:35.450Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "user": "replication"}
2024-07-01T09:14:35.460Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "secret": "some-name-mysql-init", "user": "replication"}
2024-07-01T09:14:35.469Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "user": "replication"}
2024-07-01T09:14:35.469Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "user": "proxyadmin"}
2024-07-01T09:14:35.500Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "user": "proxyadmin"}
2024-07-01T09:14:35.510Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "user": "proxyadmin"}
2024-07-01T09:14:35.510Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "last-applied-secret": "694d9737593c56e92705757098afd55350ceebc5d02b9163ec170d7477fc9722"}
2024-07-01T09:14:35.510Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "last-applied-secret": "694d9737593c56e92705757098afd55350ceebc5d02b9163ec170d7477fc9722"}
2024-07-01T09:14:35.869Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "5b14f907-0ea2-4822-90b2-06401d0fbcd4", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-07-01T09:16:11.267Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "77b8b991-d03b-4704-878d-14d29993afa7", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-29349 on 10.73.128.10:53: no such host"}
2024-07-01T09:16:16.527Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "8dcea008-7697-46b9-a78e-a2f5bc26a7c5", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-29349 on 10.73.128.10:53: no such host"}
2024-07-01T09:16:21.810Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "ad48104e-368f-4863-af00-9aa480fd21b4", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-29349 on 10.73.128.10:53: no such host"}
2024-07-01T09:16:27.001Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "7ea66c60-a799-4fdf-b0ae-c5a4a17a3d93", "primary name": "some-name-pxc-0.some-name-pxc.users-29349.svc.cluster.local"}
2024-07-01T09:16:32.186Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "0b317e37-df3f-4d11-849a-bf8a1ddb7b28", "primary name": "some-name-pxc-0.some-name-pxc.users-29349.svc.cluster.local"}
2024-07-01T09:16:37.496Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "3a363f5e-2276-41da-8658-c33f97b649f3", "primary name": "some-name-pxc-0.some-name-pxc.users-29349.svc.cluster.local"}
2024-07-01T09:16:42.679Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "180e4442-0722-4958-8dfe-42b00b6a1ab2", "primary name": "some-name-pxc-0.some-name-pxc.users-29349.svc.cluster.local"}
2024-07-01T09:16:47.843Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "1ddfae71-7c3c-4f1a-ab28-1860410d2efe", "primary name": "some-name-pxc-0.some-name-pxc.users-29349.svc.cluster.local"}
2024-07-01T09:16:53.008Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "14d75263-b5c3-4c1e-80a4-af17f91a0364", "primary name": "some-name-pxc-0.some-name-pxc.users-29349.svc.cluster.local"}
2024-07-01T09:17:02.112Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "9719f122-955b-4279-9aaa-e4721cd0e9a2"}
2024-07-01T09:17:06.684Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "559d7bd6-81ac-4c59-87b4-09601f2dff8e"}
2024-07-01T09:17:11.888Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "52e6a241-04d4-4518-83d5-d2b48de6de84"}
2024-07-01T09:17:17.222Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "f1265548-57d6-4b6b-bf7a-d712afb7a53c"}
2024-07-01T09:17:19.268Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d6d902f5-2303-4813-824e-da464e35b02c", "user": "operator"}
2024-07-01T09:17:19.290Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d6d902f5-2303-4813-824e-da464e35b02c", "user": "operator"}
2024-07-01T09:17:19.298Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d6d902f5-2303-4813-824e-da464e35b02c", "secret": "some-name-mysql-init", "user": "operator"}
2024-07-01T09:17:19.308Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d6d902f5-2303-4813-824e-da464e35b02c", "user": "operator"}
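[Editor's note] The recurring sequence above (Password changed, updating user → User password updated → MySQL init secret updated/created → Internal secrets updated, then Proxy pods will be restarted / PXC pods will be restarted once the last-applied-secret hash changes) is the operator's reaction to a changed system-user password. In this test the change is driven by patching the user secret; a sketch of that trigger, assuming the per-user key layout the operator uses for its secrets (the secret name is taken from this run's "Created user secrets" entry, the literal password is illustrative):

# rotate the 'monitor' password; the operator notices the secret change
# and performs the update/restart sequence logged above
new_pass=$(echo -n 'test-password2' | base64)
kubectl -n users-29349 patch secret my-cluster-secrets-2 \
    --type=merge -p "{\"data\":{\"monitor\":\"${new_pass}\"}}"

The 'Access denied' and 'ProxySQL connection check failed' ERROR entries that follow each rotation appear to be transient in this run: they occur while pods still hold the old credentials, and subsequent reconciles report "PXC users synced with ProxySQL" again.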
2024-07-01T09:17:19.308Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d6d902f5-2303-4813-824e-da464e35b02c", "last-applied-secret": "f92ad8899c057cad8c41e4e2a3a1f0a7061f37e0f1cdd7866c6203be98b63fd8"} 2024-07-01T09:17:20.765Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d6d902f5-2303-4813-824e-da464e35b02c", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29349.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29349.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29349.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29349.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29349.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-29349.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-01T09:17:59.023Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "49636248-4638-45ad-90a5-c93600ef1e19"} 2024-07-01T09:18:06.809Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "0e2ab068-cc8f-468a-9c7e-a18191efaa06"} 2024-07-01T09:18:12.471Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "b5efc7d7-84f3-4fac-97d1-3512b311251e"} 2024-07-01T09:18:17.609Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "9b383427-aad0-4c43-b287-e20990667b26"} 2024-07-01T09:18:22.988Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "631ec0cb-8fab-4779-9630-36a22956d999"} 2024-07-01T09:18:27.926Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "e55dac1c-a125-4ed3-8544-96415d750f4e"} 2024-07-01T09:18:34.085Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "371dcd26-9ad2-40d3-893e-27ea779af1c9"} 2024-07-01T09:18:40.915Z DEBUG PXC users synced 
with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "30a208c8-1730-4dd3-aabc-399145488a90"} 2024-07-01T09:18:44.693Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "cd0649bc-049a-435e-87e6-dfa4890d05f4"} 2024-07-01T09:18:49.997Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d2dcc8a5-d933-406f-8e1b-c8b5acbf2a73"} 2024-07-01T09:18:55.292Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "084e253a-e090-4e66-a4a4-462bcf662899"} 2024-07-01T09:19:00.573Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "44aff4b3-a3c5-4791-9352-44cbe4e1f607"} 2024-07-01T09:19:05.876Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "86b30142-57d2-41a4-a17b-50f5477e4918"} 2024-07-01T09:19:11.098Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "3b1e46ae-ef9e-47c9-bbd0-c5e3275e2e02"} 2024-07-01T09:19:17.006Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "74e4bd53-d517-4836-ae3b-a79790b2df89"} 2024-07-01T09:19:21.627Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "60ba2db1-7dd5-4d1d-89bc-f93e076a4b0c"} 2024-07-01T09:19:23.434Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d98a6ea7-a1cd-439e-ad32-7650f596bdd1", "user": "root"} 2024-07-01T09:19:23.462Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d98a6ea7-a1cd-439e-ad32-7650f596bdd1", "user": "root"} 2024-07-01T09:19:23.470Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d98a6ea7-a1cd-439e-ad32-7650f596bdd1", "secret": "some-name-mysql-init", "user": "root"} 2024-07-01T09:19:28.783Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d98a6ea7-a1cd-439e-ad32-7650f596bdd1"} 2024-07-01T09:19:28.794Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d98a6ea7-a1cd-439e-ad32-7650f596bdd1", "user": "root"} 2024-07-01T09:19:28.794Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d98a6ea7-a1cd-439e-ad32-7650f596bdd1", "user": "monitor"} 2024-07-01T09:19:28.813Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d98a6ea7-a1cd-439e-ad32-7650f596bdd1", "user": "monitor"} 2024-07-01T09:19:28.820Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d98a6ea7-a1cd-439e-ad32-7650f596bdd1", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-01T09:19:28.851Z INFO Proxy user updated {"controller": 
"pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d98a6ea7-a1cd-439e-ad32-7650f596bdd1", "user": "monitor"} 2024-07-01T09:19:28.862Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d98a6ea7-a1cd-439e-ad32-7650f596bdd1", "user": "monitor"} 2024-07-01T09:19:28.862Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d98a6ea7-a1cd-439e-ad32-7650f596bdd1", "user": "xtrabackup"} 2024-07-01T09:19:28.880Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d98a6ea7-a1cd-439e-ad32-7650f596bdd1", "user": "xtrabackup"} 2024-07-01T09:19:28.888Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d98a6ea7-a1cd-439e-ad32-7650f596bdd1", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-07-01T09:19:28.898Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d98a6ea7-a1cd-439e-ad32-7650f596bdd1", "user": "xtrabackup"} 2024-07-01T09:19:28.899Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d98a6ea7-a1cd-439e-ad32-7650f596bdd1", "user": "proxyadmin"} 2024-07-01T09:19:28.930Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d98a6ea7-a1cd-439e-ad32-7650f596bdd1", "user": "proxyadmin"} 2024-07-01T09:19:28.938Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d98a6ea7-a1cd-439e-ad32-7650f596bdd1", "user": "proxyadmin"} 2024-07-01T09:19:28.938Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d98a6ea7-a1cd-439e-ad32-7650f596bdd1", "last-applied-secret": "22d5e87ef54ae4783f5ec6bb281b85ee5203eddd890cdc978af7cfaf0f512151"} 2024-07-01T09:19:28.938Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d98a6ea7-a1cd-439e-ad32-7650f596bdd1", "last-applied-secret": "22d5e87ef54ae4783f5ec6bb281b85ee5203eddd890cdc978af7cfaf0f512151"} 2024-07-01T09:19:29.177Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "d98a6ea7-a1cd-439e-ad32-7650f596bdd1", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-01T09:21:04.045Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "4821afac-c761-461e-8d9d-5ea272f59562", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-29349 on 10.73.128.10:53: no such host"} 2024-07-01T09:21:09.338Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "b6aa4d86-2522-44a0-834a-09ff9dff81cb", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-29349 on 10.73.128.10:53: no such host"} 2024-07-01T09:21:14.899Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "7613ea7d-ce4a-422d-a7fa-ff707d61f143", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-29349 on 10.73.128.10:53: no such host"} 2024-07-01T09:21:20.636Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "95303310-38b1-46ac-b65b-ec6a57573772", "primary name": "some-name-pxc-0.some-name-pxc.users-29349.svc.cluster.local"} 2024-07-01T09:21:25.840Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "99397868-a0e0-4aeb-8b61-c470c958264e", "primary name": "some-name-pxc-0.some-name-pxc.users-29349.svc.cluster.local"} 2024-07-01T09:21:30.991Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "94ab87f2-3838-42e5-a242-9915cde30e72", "primary name": "some-name-pxc-0.some-name-pxc.users-29349.svc.cluster.local"} 2024-07-01T09:21:36.148Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "fd2e3fdc-3377-4730-a541-11f3688af8d4", "primary name": "some-name-pxc-0.some-name-pxc.users-29349.svc.cluster.local"} 2024-07-01T09:21:41.395Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "cb6a5abe-be55-4c0e-bc58-f278a43a42a0", "primary name": "some-name-pxc-0.some-name-pxc.users-29349.svc.cluster.local"} 2024-07-01T09:21:46.731Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "1db82128-2d70-4286-8341-15525bff1c48", "primary name": "some-name-pxc-0.some-name-pxc.users-29349.svc.cluster.local"} 2024-07-01T09:21:55.511Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "fc9440c2-b662-4d8d-9ccf-156d80989f61"} 2024-07-01T09:21:56.924Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "edeaf6fd-ac34-43ef-a14e-2fc7b1f3d49c", "user": "root"} 2024-07-01T09:21:56.959Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "edeaf6fd-ac34-43ef-a14e-2fc7b1f3d49c", "user": "root"} 2024-07-01T09:21:56.973Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "edeaf6fd-ac34-43ef-a14e-2fc7b1f3d49c", "secret": "some-name-mysql-init", "user": "root"} 2024-07-01T09:21:56.988Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "edeaf6fd-ac34-43ef-a14e-2fc7b1f3d49c", "user": "root"} 2024-07-01T09:21:56.988Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "edeaf6fd-ac34-43ef-a14e-2fc7b1f3d49c", "user": "operator"} 2024-07-01T09:21:57.004Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "edeaf6fd-ac34-43ef-a14e-2fc7b1f3d49c", "user": "operator"} 2024-07-01T09:21:57.016Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "edeaf6fd-ac34-43ef-a14e-2fc7b1f3d49c", "secret": "some-name-mysql-init", "user": "operator"} 2024-07-01T09:21:57.033Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "edeaf6fd-ac34-43ef-a14e-2fc7b1f3d49c", "user": "operator"} 2024-07-01T09:21:57.034Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "edeaf6fd-ac34-43ef-a14e-2fc7b1f3d49c", "user": "monitor"} 2024-07-01T09:21:57.050Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "edeaf6fd-ac34-43ef-a14e-2fc7b1f3d49c", "user": "monitor"} 2024-07-01T09:21:57.058Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "edeaf6fd-ac34-43ef-a14e-2fc7b1f3d49c", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-01T09:21:57.067Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "edeaf6fd-ac34-43ef-a14e-2fc7b1f3d49c", "user": "monitor"} 2024-07-01T09:21:57.068Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "edeaf6fd-ac34-43ef-a14e-2fc7b1f3d49c", "user": "xtrabackup"} 2024-07-01T09:21:57.083Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "edeaf6fd-ac34-43ef-a14e-2fc7b1f3d49c", "user": "xtrabackup"} 2024-07-01T09:21:57.095Z INFO 
MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "edeaf6fd-ac34-43ef-a14e-2fc7b1f3d49c", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-07-01T09:21:57.103Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "edeaf6fd-ac34-43ef-a14e-2fc7b1f3d49c", "user": "xtrabackup"} 2024-07-01T09:21:57.103Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "edeaf6fd-ac34-43ef-a14e-2fc7b1f3d49c", "user": "replication"} 2024-07-01T09:21:57.118Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "edeaf6fd-ac34-43ef-a14e-2fc7b1f3d49c", "user": "replication"} 2024-07-01T09:21:57.127Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "edeaf6fd-ac34-43ef-a14e-2fc7b1f3d49c", "secret": "some-name-mysql-init", "user": "replication"} 2024-07-01T09:21:57.144Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "edeaf6fd-ac34-43ef-a14e-2fc7b1f3d49c", "user": "replication"} 2024-07-01T09:21:57.144Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "edeaf6fd-ac34-43ef-a14e-2fc7b1f3d49c", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-07-01T09:21:57.144Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "edeaf6fd-ac34-43ef-a14e-2fc7b1f3d49c", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-07-01T09:21:57.951Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: 49a9daf2-e756-4250-9aaf-2d1b7e769f76 2024-07-01T09:22:01.154Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "283d7b60-f121-4c60-82a2-8a2fbbb77936", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 10.73.139.175:3306: connect: connection refused"} 2024-07-01T09:22:04.389Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "e18a5086-ce89-4229-85d2-a810700957c2", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 10.73.139.175:3306: connect: connection refused"} 2024-07-01T09:24:53.512Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "e451427f-82d2-4ee8-a747-62ec86b75d72", "user": "monitor"} 2024-07-01T09:24:53.532Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "e451427f-82d2-4ee8-a747-62ec86b75d72", "user": "monitor"} 2024-07-01T09:24:53.541Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "e451427f-82d2-4ee8-a747-62ec86b75d72", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-01T09:24:53.549Z INFO Internal secrets updated 
{"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "e451427f-82d2-4ee8-a747-62ec86b75d72", "user": "monitor"} 2024-07-01T09:24:53.549Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-29349", "name": "some-name", "reconcileID": "e451427f-82d2-4ee8-a747-62ec86b75d72", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:222 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:261 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:324 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1248 [mysql] 2024/07/01 09:24:06 connection.go:49: read tcp 10.182.145.58:48012->10.73.139.175:3306: i/o timeout sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2 + grep -v NAMESPACE + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-29349 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.vT7iYP3tI4 ++ mktemp + local LAST_ERR=/tmp/tmp.s2Er6VG5I5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.vT7iYP3tI4 perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.s2Er6VG5I5 + rm /tmp/tmp.vT7iYP3tI4 /tmp/tmp.s2Er6VG5I5 + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.WNDhH6yjVc ++ mktemp + local LAST_ERR=/tmp/tmp.qnKVxmR3LY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.WNDhH6yjVc No resources found + cat /tmp/tmp.qnKVxmR3LY + rm /tmp/tmp.WNDhH6yjVc /tmp/tmp.qnKVxmR3LY + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.rby9sgnmXy ++ mktemp + local LAST_ERR=/tmp/tmp.CTvop3RYIW + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rby9sgnmXy No resources found + cat /tmp/tmp.CTvop3RYIW + rm /tmp/tmp.rby9sgnmXy /tmp/tmp.CTvop3RYIW + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.qIflKK0hl8 ++ mktemp + local LAST_ERR=/tmp/tmp.9y1Ps8CX3K + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qIflKK0hl8 validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.9y1Ps8CX3K + rm /tmp/tmp.qIflKK0hl8 
+ kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml
+ :
+ '[' '!' -z '' ']'
+ '[' -n pxc-operator ']'
+ kubectl_bin delete --grace-period=0 --force=true namespace users-29349
+ rm -rf /tmp/tmp.XJynlYwy6y
++ mktemp
+ kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.KQDaiQpxD1
+ desc 'test passed'
+ set +o xtrace
-----------------------------------------------------------------------------------
test passed
-----------------------------------------------------------------------------------
+ local LAST_OUT=/tmp/tmp.7ySCcNLXZs
++ mktemp
++ mktemp
+ local LAST_ERR=/tmp/tmp.kSsLnJZIiz
+ local exit_status=0
++ seq 0 2
+ local LAST_ERR=/tmp/tmp.4Zp1ilON6z
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace users-29349
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace pxc-operator
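Note: the two namespace deletions run as parallel background jobs, which is why their mktemp/LAST_OUT/LAST_ERR trace lines interleave above. --grace-period=0 together with --force=true skips the graceful-termination wait and removes the objects immediately (kubectl itself warns that underlying workloads may briefly keep running). The serialized equivalent of what the two kubectl_bin wrappers execute:

  kubectl delete --grace-period=0 --force=true namespace users-29349
  kubectl delete --grace-period=0 --force=true namespace pxc-operator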