Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/logs/users-5-7.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + create_infra users-19811 + local ns=users-19811 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-32670 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.ATqg5qOUyT ++ mktemp + local LAST_ERR=/tmp/tmp.l2HOJ17qYf + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ATqg5qOUyT perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.l2HOJ17qYf + rm /tmp/tmp.ATqg5qOUyT /tmp/tmp.l2HOJ17qYf + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.VWWcGZJ7VC ++ mktemp + local LAST_ERR=/tmp/tmp.RFMGfGBmyK + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.VWWcGZJ7VC No resources found + cat /tmp/tmp.RFMGfGBmyK + rm /tmp/tmp.VWWcGZJ7VC /tmp/tmp.RFMGfGBmyK + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.wOGCEGbtOV ++ mktemp + local LAST_ERR=/tmp/tmp.mKAlKOIKGr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.wOGCEGbtOV No resources found + cat /tmp/tmp.mKAlKOIKGr + rm /tmp/tmp.wOGCEGbtOV /tmp/tmp.mKAlKOIKGr + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified 
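
Every namespace setup in this suite begins with destroy_chaos_mesh, and the repeated "error: resource(s) were provided, but no name was specified" lines above are expected: when grep finds no chaos-mesh objects, kubectl delete is invoked with an empty name list and fails, and the script discards that failure (the "+ :" no-ops in the trace). A minimal sketch of the idiom, with the function name taken from the trace and the body reconstructed, not the suite's exact source:

destroy_chaos_mesh() {
    local kind
    for kind in MutatingWebhookConfiguration ValidatingWebhookConfiguration crd clusterrolebinding clusterrole; do
        # The unquoted command substitution expands to zero words when nothing
        # matches (the real run greps slightly different patterns per kind,
        # e.g. chaos-mesh.org for CRDs); the trailing `|| :` swallows the
        # resulting "no name was specified" error seen above.
        timeout 30 kubectl delete "$kind" $(kubectl get "$kind" | grep chaos-mesh | awk '{print $1}') || :
    done
}
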
+ : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' ++ mktemp + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator ++ mktemp + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + local LAST_OUT=/tmp/tmp.6vjtUFimyX ++ mktemp + local LAST_OUT=/tmp/tmp.3WxhbAebQZ ++ mktemp + local LAST_ERR=/tmp/tmp.3dD65YpWTh + local exit_status=0 + local LAST_ERR=/tmp/tmp.ykz00Q5aUN + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + awk '{print$1}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.6vjtUFimyX + cat /tmp/tmp.3dD65YpWTh + rm /tmp/tmp.6vjtUFimyX /tmp/tmp.3dD65YpWTh + return 0 namespace "users-32670" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3WxhbAebQZ namespace "pxc-operator" deleted + cat /tmp/tmp.ykz00Q5aUN + rm /tmp/tmp.3WxhbAebQZ /tmp/tmp.ykz00Q5aUN + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.TGVtcXbYFP ++ mktemp + local LAST_ERR=/tmp/tmp.dvtpsCtC71 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.TGVtcXbYFP namespace/pxc-operator created + cat /tmp/tmp.dvtpsCtC71 + rm /tmp/tmp.TGVtcXbYFP /tmp/tmp.dvtpsCtC71 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.Te1ryBYOf2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.72TIOAio8h ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Te1ryBYOf2 ++ cat /tmp/tmp.72TIOAio8h ++ rm /tmp/tmp.Te1ryBYOf2 /tmp/tmp.72TIOAio8h ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1708-7a028e99-10-cluster4 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.HlkJ0tKBjl ++ mktemp + local LAST_ERR=/tmp/tmp.Dx6Xklc7GT + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1708-7a028e99-10-cluster4 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HlkJ0tKBjl Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1708-7a028e99-10-cluster4" modified. 
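
The deploy_operator step that follows applies the CRDs with server-side apply (--force-conflicts, which avoids client-side apply's annotation size limits on large CRDs) and rewrites the RBAC and operator manifests in-flight with sed and yq before piping them to kubectl apply -f -. A condensed sketch of that pipeline, with paths relative to the e2e workspace and the expressions copied from the trace (a reconstruction, not the suite's code):

# CRDs: server-side apply, taking ownership of conflicting fields.
kubectl apply --server-side --force-conflicts -f deploy/crd.yaml
# RBAC: point the ClusterRoleBinding at the pxc-operator namespace.
sed -e 's^namespace: .*^namespace: pxc-operator^' deploy/cw-rbac.yaml | kubectl apply -f -
# Operator Deployment: pin the PR image and turn on debug logging.
cat deploy/cw-operator.yaml \
    | sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1708-7a028e99^' \
    | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - \
    | kubectl apply -f -
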
+ cat /tmp/tmp.Dx6Xklc7GT + rm /tmp/tmp.HlkJ0tKBjl /tmp/tmp.Dx6Xklc7GT + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.n8Efecmr6K ++ mktemp + local LAST_ERR=/tmp/tmp.rVLDprfM8X + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.n8Efecmr6K customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.rVLDprfM8X + rm /tmp/tmp.n8Efecmr6K /tmp/tmp.rVLDprfM8X + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.OpQPLQgFmC ++ mktemp + local LAST_ERR=/tmp/tmp.MzhSirbDLU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.OpQPLQgFmC clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.MzhSirbDLU + rm /tmp/tmp.OpQPLQgFmC /tmp/tmp.MzhSirbDLU + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/deploy/cw-operator.yaml + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1708-7a028e99^' + kubectl_bin apply -f - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - ++ mktemp + local LAST_OUT=/tmp/tmp.z9cRDFdZsQ ++ mktemp + local LAST_ERR=/tmp/tmp.DiRweIiXIi + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.z9cRDFdZsQ deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.DiRweIiXIi + rm /tmp/tmp.z9cRDFdZsQ /tmp/tmp.DiRweIiXIi + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.PBA7qZwrum ++ mktemp + local LAST_ERR=/tmp/tmp.UoH0967V4D + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PBA7qZwrum pod/percona-xtradb-cluster-operator-5b5db6b897-s6q7g condition met + cat /tmp/tmp.UoH0967V4D + rm /tmp/tmp.PBA7qZwrum /tmp/tmp.UoH0967V4D + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.UjixqVphA4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.11Lnh6TMAe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UjixqVphA4 ++ cat /tmp/tmp.11Lnh6TMAe ++ rm /tmp/tmp.UjixqVphA4 /tmp/tmp.11Lnh6TMAe ++ return 0 + wait_pod percona-xtradb-cluster-operator-5b5db6b897-s6q7g 480 pxc-operator + local pod=percona-xtradb-cluster-operator-5b5db6b897-s6q7g + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-5b5db6b897-s6q7g ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-5b5db6b897-s6q7g condition met percona-xtradb-cluster-operator-5b5db6b897-s6q7g.Ok + sleep 3 + create_namespace users-19811 + local namespace=users-19811 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v 
'^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces users-19811' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-19811 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-19811 + kubectl_bin get ns ++ mktemp + xargs kubectl delete ns + local LAST_OUT=/tmp/tmp.jcq1FdhiCy ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.KRpjzCtBhX + local LAST_ERR=/tmp/tmp.0rzT1t7LTA + local exit_status=0 ++ mktemp ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + local LAST_ERR=/tmp/tmp.p3hW85zhVy + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-19811 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-19811 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jcq1FdhiCy + cat /tmp/tmp.0rzT1t7LTA + rm /tmp/tmp.jcq1FdhiCy /tmp/tmp.0rzT1t7LTA + return 0 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-19811 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.KRpjzCtBhX + cat /tmp/tmp.p3hW85zhVy Error from server (NotFound): namespaces "users-19811" not found + rm /tmp/tmp.KRpjzCtBhX /tmp/tmp.p3hW85zhVy + return 1 + : + wait_for_delete namespace/users-19811 + local res=namespace/users-19811 + echo -n 'namespace/users-19811 - ' namespace/users-19811 - + set +o xtrace Error from server (NotFound): namespaces "users-19811" not found + desc 'create namespace users-19811' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-19811 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-19811 ++ mktemp + local LAST_OUT=/tmp/tmp.bghCDI36jS ++ mktemp + local LAST_ERR=/tmp/tmp.HTH49BLYWl + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-19811 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.bghCDI36jS namespace/users-19811 created + cat /tmp/tmp.HTH49BLYWl + rm /tmp/tmp.bghCDI36jS /tmp/tmp.HTH49BLYWl + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.fNxBpBlLT5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.C0Tl1BVJZf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fNxBpBlLT5 ++ cat /tmp/tmp.C0Tl1BVJZf ++ rm /tmp/tmp.fNxBpBlLT5 /tmp/tmp.C0Tl1BVJZf ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1708-7a028e99-10-cluster4 --namespace=users-19811 ++ mktemp + local LAST_OUT=/tmp/tmp.BkmzL4sUro ++ mktemp + local LAST_ERR=/tmp/tmp.Xe9dfBcyr9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1708-7a028e99-10-cluster4 --namespace=users-19811 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BkmzL4sUro Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1708-7a028e99-10-cluster4" modified. 
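
With the users-19811 namespace active, spinup_pxc applies the secrets and then pushes every manifest through a chain of sed substitutions (cat_config in the trace) so one config file can be retargeted at whichever image set is under test. A trimmed sketch of that helper, assuming roughly this shape; the substitution expressions themselves are copied from the trace:

cat_config() {
    # Rewrite apiVersion, image tags, and the minio endpoint before apply.
    cat "$1" \
        | /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
        | /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' \
        | /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' \
        | /usr/bin/sed -e 's~minio-service.#namespace~minio-service.users-19811~'
}
cat_config conf/some-name.yml | kubectl apply -f -
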
+ cat /tmp/tmp.Xe9dfBcyr9 + rm /tmp/tmp.BkmzL4sUro /tmp/tmp.Xe9dfBcyr9 + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.GNXI26Ne0y ++ mktemp + local LAST_ERR=/tmp/tmp.Z2NkwhgxIF + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.GNXI26Ne0y secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.Z2NkwhgxIF + rm /tmp/tmp.GNXI26Ne0y /tmp/tmp.Z2NkwhgxIF + return 0 + desc 'create PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.YBhER0CuMc ++ mktemp + local LAST_ERR=/tmp/tmp.vyQiKl6mab + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.YBhER0CuMc secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.vyQiKl6mab + rm /tmp/tmp.YBhER0CuMc /tmp/tmp.vyQiKl6mab + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/conf/client.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_OUT=/tmp/tmp.Hn4D7zw8O9 + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ mktemp 
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-19811~ + local LAST_ERR=/tmp/tmp.aFt1dN1L6i + local exit_status=0 + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' ++ seq 0 2 + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1708-7a028e99#' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Hn4D7zw8O9 deployment.apps/pxc-client created + cat /tmp/tmp.aFt1dN1L6i + rm /tmp/tmp.Hn4D7zw8O9 /tmp/tmp.aFt1dN1L6i + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1708-7a028e99#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-19811~ + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + local LAST_OUT=/tmp/tmp.hdVWvBTjvH + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + local LAST_ERR=/tmp/tmp.Oa0NUg5Sgc + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hdVWvBTjvH perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.Oa0NUg5Sgc + rm /tmp/tmp.hdVWvBTjvH /tmp/tmp.Oa0NUg5Sgc + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.C45zmyd1Dq ++++ mktemp +++ local LAST_ERR=/tmp/tmp.vygqkVl0cf +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set 
+e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.C45zmyd1Dq +++ cat /tmp/tmp.vygqkVl0cf +++ rm /tmp/tmp.C45zmyd1Dq /tmp/tmp.vygqkVl0cf +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.ocWPxSbt4t ++++ mktemp +++ local LAST_ERR=/tmp/tmp.Irwi0jwVxv +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.ocWPxSbt4t +++ cat /tmp/tmp.Irwi0jwVxv +++ rm /tmp/tmp.ocWPxSbt4t /tmp/tmp.Irwi0jwVxv +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-19811 ++ mktemp + local LAST_OUT=/tmp/tmp.q26c4FOxbj ++ mktemp + local LAST_ERR=/tmp/tmp.mZP8zG61HG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-19811 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-19811 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-19811 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.q26c4FOxbj + cat /tmp/tmp.mZP8zG61HG error: no matching resources found + rm /tmp/tmp.q26c4FOxbj /tmp/tmp.mZP8zG61HG + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met some-name-pxc-0.Ok + for i in 
'$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zBA24HWpf5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.CT04RFqAVC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zBA24HWpf5 ++ cat /tmp/tmp.CT04RFqAVC Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.zBA24HWpf5 /tmp/tmp.CT04RFqAVC ++ return 0 + [[ -n '' ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SG91n3aOBd +++ mktemp ++ local LAST_ERR=/tmp/tmp.egBwtyBNmF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SG91n3aOBd ++ cat /tmp/tmp.egBwtyBNmF ++ rm /tmp/tmp.SG91n3aOBd /tmp/tmp.egBwtyBNmF ++ return 0 + client_pod=pxc-client-64b479df95-qhk5k + wait_pod pxc-client-64b479df95-qhk5k + local pod=pxc-client-64b479df95-qhk5k + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-qhk5k ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-qhk5k condition met pxc-client-64b479df95-qhk5k.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.D7FSUOK9mc +++ mktemp ++ local LAST_ERR=/tmp/tmp.vBkdsFpAec ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ 
break ++ cat /tmp/tmp.D7FSUOK9mc ++ cat /tmp/tmp.vBkdsFpAec ++ rm /tmp/tmp.D7FSUOK9mc /tmp/tmp.vBkdsFpAec ++ return 0 + client_pod=pxc-client-64b479df95-qhk5k + wait_pod pxc-client-64b479df95-qhk5k + local pod=pxc-client-64b479df95-qhk5k + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-qhk5k ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-qhk5k condition met pxc-client-64b479df95-qhk5k.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qEMS9p8apL +++ mktemp ++ local LAST_ERR=/tmp/tmp.HjNH9fr9yN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qEMS9p8apL ++ cat /tmp/tmp.HjNH9fr9yN ++ rm /tmp/tmp.qEMS9p8apL /tmp/tmp.HjNH9fr9yN ++ return 0 + client_pod=pxc-client-64b479df95-qhk5k + wait_pod pxc-client-64b479df95-qhk5k + local pod=pxc-client-64b479df95-qhk5k + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-qhk5k ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-qhk5k condition met pxc-client-64b479df95-qhk5k.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.MowO4SojyA/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-1.sql /tmp/tmp.MowO4SojyA/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vsi5vqmMcT +++ mktemp ++ local LAST_ERR=/tmp/tmp.SM1z95jz4j ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vsi5vqmMcT ++ cat /tmp/tmp.SM1z95jz4j ++ rm /tmp/tmp.vsi5vqmMcT /tmp/tmp.SM1z95jz4j ++ return 0 + client_pod=pxc-client-64b479df95-qhk5k + wait_pod pxc-client-64b479df95-qhk5k + local pod=pxc-client-64b479df95-qhk5k + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-qhk5k ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-qhk5k condition met pxc-client-64b479df95-qhk5k.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.MowO4SojyA/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-1.sql /tmp/tmp.MowO4SojyA/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nZlUpArYkE +++ mktemp ++ local LAST_ERR=/tmp/tmp.oksnHoXhwI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nZlUpArYkE ++ cat /tmp/tmp.oksnHoXhwI ++ rm /tmp/tmp.nZlUpArYkE /tmp/tmp.oksnHoXhwI ++ return 0 + client_pod=pxc-client-64b479df95-qhk5k + wait_pod pxc-client-64b479df95-qhk5k + local pod=pxc-client-64b479df95-qhk5k + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-qhk5k ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-qhk5k condition met pxc-client-64b479df95-qhk5k.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.MowO4SojyA/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-1.sql /tmp/tmp.MowO4SojyA/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ctotEzlMt9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.yZh60nvR9f ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ctotEzlMt9 ++ cat /tmp/tmp.yZh60nvR9f Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.ctotEzlMt9 /tmp/tmp.yZh60nvR9f ++ return 0 + '[' '' ']' + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.nCK5P68eJc ++ mktemp + local LAST_ERR=/tmp/tmp.knV9wzjD3i + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.nCK5P68eJc secret/my-cluster-secrets patched + cat /tmp/tmp.knV9wzjD3i + rm /tmp/tmp.nCK5P68eJc /tmp/tmp.knV9wzjD3i + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SPYJlnxqtn +++ mktemp ++ local LAST_ERR=/tmp/tmp.qPsNP5sjzm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SPYJlnxqtn ++ cat /tmp/tmp.qPsNP5sjzm ++ rm /tmp/tmp.SPYJlnxqtn /tmp/tmp.qPsNP5sjzm ++ return 0 + client_pod=pxc-client-64b479df95-qhk5k + wait_pod pxc-client-64b479df95-qhk5k + local pod=pxc-client-64b479df95-qhk5k + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-qhk5k ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-64b479df95-qhk5k condition met pxc-client-64b479df95-qhk5k.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.MowO4SojyA/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-4.sql /tmp/tmp.MowO4SojyA/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.YWGXfGOdyr ++ mktemp + local LAST_ERR=/tmp/tmp.wnGs2Inq39 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.YWGXfGOdyr perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.wnGs2Inq39 + rm /tmp/tmp.YWGXfGOdyr /tmp/tmp.wnGs2Inq39 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.V5a3VJvTY7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.dcosoN0wBz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.V5a3VJvTY7 ++ cat /tmp/tmp.dcosoN0wBz ++ rm /tmp/tmp.V5a3VJvTY7 /tmp/tmp.dcosoN0wBz ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4StZ47HsWd +++ mktemp ++ local LAST_ERR=/tmp/tmp.xT9wBsFUBC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4StZ47HsWd ++ cat /tmp/tmp.xT9wBsFUBC ++ rm /tmp/tmp.4StZ47HsWd /tmp/tmp.xT9wBsFUBC ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.11xYRbAjgV ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.K9L9W9FmAR +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.11xYRbAjgV +++++ cat /tmp/tmp.K9L9W9FmAR +++++ rm /tmp/tmp.11xYRbAjgV /tmp/tmp.K9L9W9FmAR +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.KKX5FActYy ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.CeesRVwKhG +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.KKX5FActYy +++++ cat /tmp/tmp.CeesRVwKhG +++++ rm /tmp/tmp.KKX5FActYy /tmp/tmp.CeesRVwKhG +++++ return 0 ++++ [[ true == 
\t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nVp8FSxydk +++ mktemp ++ local LAST_ERR=/tmp/tmp.UU6vTcSnBX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nVp8FSxydk ++ cat /tmp/tmp.UU6vTcSnBX ++ rm /tmp/tmp.nVp8FSxydk /tmp/tmp.UU6vTcSnBX ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.4uxGtEQ9lO ++ mktemp + local LAST_ERR=/tmp/tmp.A6sRGxYCGE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.4uxGtEQ9lO secret/my-cluster-secrets patched + cat /tmp/tmp.A6sRGxYCGE + rm /tmp/tmp.4uxGtEQ9lO /tmp/tmp.A6sRGxYCGE + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0l412Nc7RN +++ mktemp ++ local LAST_ERR=/tmp/tmp.VcfOUdyTEe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0l412Nc7RN ++ cat /tmp/tmp.VcfOUdyTEe ++ rm /tmp/tmp.0l412Nc7RN /tmp/tmp.VcfOUdyTEe ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TLvDSJ1MSV +++ mktemp ++ local LAST_ERR=/tmp/tmp.MM6SW0rW3c ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TLvDSJ1MSV ++ cat /tmp/tmp.MM6SW0rW3c ++ rm /tmp/tmp.TLvDSJ1MSV /tmp/tmp.MM6SW0rW3c ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.j4CA5kA2Yk +++ mktemp ++ local LAST_ERR=/tmp/tmp.WW7uAFrzHB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.j4CA5kA2Yk ++ cat /tmp/tmp.WW7uAFrzHB ++ rm /tmp/tmp.j4CA5kA2Yk /tmp/tmp.WW7uAFrzHB ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bv5dAflHjN +++ mktemp ++ 
local LAST_ERR=/tmp/tmp.BpOdmbmWox ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bv5dAflHjN ++ cat /tmp/tmp.BpOdmbmWox ++ rm /tmp/tmp.bv5dAflHjN /tmp/tmp.BpOdmbmWox ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.gqGp4Yqo2X ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.p75zCyjPbn +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.gqGp4Yqo2X +++++ cat /tmp/tmp.p75zCyjPbn +++++ rm /tmp/tmp.gqGp4Yqo2X /tmp/tmp.p75zCyjPbn +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.vGIFMCqpqO ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.OV7x1n0Ltl +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.vGIFMCqpqO +++++ cat /tmp/tmp.OV7x1n0Ltl +++++ rm /tmp/tmp.vGIFMCqpqO /tmp/tmp.OV7x1n0Ltl +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qUWouRjUrZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.gOo8yYVW1h ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qUWouRjUrZ ++ cat /tmp/tmp.gOo8yYVW1h ++ rm /tmp/tmp.qUWouRjUrZ /tmp/tmp.gOo8yYVW1h ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.MowO4SojyA/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-2.sql /tmp/tmp.MowO4SojyA/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.MowO4SojyA/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-2.sql /tmp/tmp.MowO4SojyA/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.MowO4SojyA/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-2.sql /tmp/tmp.MowO4SojyA/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.VrS3TaEImW ++ mktemp + local LAST_ERR=/tmp/tmp.3qUwC7jm8F + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.VrS3TaEImW perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.3qUwC7jm8F + rm /tmp/tmp.VrS3TaEImW /tmp/tmp.3qUwC7jm8F + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.iWwQz5yO0G ++ mktemp + local LAST_ERR=/tmp/tmp.fiUvWEmLMy + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.iWwQz5yO0G secret/my-cluster-secrets patched + cat /tmp/tmp.fiUvWEmLMy + rm /tmp/tmp.iWwQz5yO0G /tmp/tmp.fiUvWEmLMy + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FWzlQCxwDa +++ mktemp ++ local LAST_ERR=/tmp/tmp.3EVRE6eglI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FWzlQCxwDa ++ cat /tmp/tmp.3EVRE6eglI ++ rm /tmp/tmp.FWzlQCxwDa /tmp/tmp.3EVRE6eglI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uATc7MkbaP +++ mktemp ++ local LAST_ERR=/tmp/tmp.lKUPcBK4kr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uATc7MkbaP ++ cat /tmp/tmp.lKUPcBK4kr ++ rm /tmp/tmp.uATc7MkbaP /tmp/tmp.lKUPcBK4kr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dt9DJgXB40 +++ mktemp ++ local LAST_ERR=/tmp/tmp.m9BNuF3hPK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 
'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dt9DJgXB40 ++ cat /tmp/tmp.m9BNuF3hPK ++ rm /tmp/tmp.dt9DJgXB40 /tmp/tmp.m9BNuF3hPK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.po4dlUIIWa +++ mktemp ++ local LAST_ERR=/tmp/tmp.d9HZisTL9u ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.po4dlUIIWa ++ cat /tmp/tmp.d9HZisTL9u ++ rm /tmp/tmp.po4dlUIIWa /tmp/tmp.d9HZisTL9u ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.M33eEfV31r +++ mktemp ++ local LAST_ERR=/tmp/tmp.rCR7dimaPN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.M33eEfV31r ++ cat /tmp/tmp.rCR7dimaPN ++ rm /tmp/tmp.M33eEfV31r /tmp/tmp.rCR7dimaPN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.egBFwF4LJL +++ mktemp ++ local LAST_ERR=/tmp/tmp.LDwI4LD90I ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.egBFwF4LJL ++ cat /tmp/tmp.LDwI4LD90I ++ rm /tmp/tmp.egBFwF4LJL /tmp/tmp.LDwI4LD90I ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IIxYBGgc73 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZJcxya1JrH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IIxYBGgc73 ++ cat /tmp/tmp.ZJcxya1JrH ++ rm /tmp/tmp.IIxYBGgc73 /tmp/tmp.ZJcxya1JrH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Tzecg9QDg3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.kpCh86ylSM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Tzecg9QDg3 ++ cat /tmp/tmp.kpCh86ylSM ++ rm /tmp/tmp.Tzecg9QDg3 /tmp/tmp.kpCh86ylSM ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tXHF6SIBld +++ mktemp ++ local LAST_ERR=/tmp/tmp.C2vHLPHccH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e 
++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tXHF6SIBld ++ cat /tmp/tmp.C2vHLPHccH ++ rm /tmp/tmp.tXHF6SIBld /tmp/tmp.C2vHLPHccH ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.0IibItlNEV ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.HqwwlO8Otk +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.0IibItlNEV +++++ cat /tmp/tmp.HqwwlO8Otk +++++ rm /tmp/tmp.0IibItlNEV /tmp/tmp.HqwwlO8Otk +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.FIOSDkz8K3 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.SWzMZqWAOg +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.FIOSDkz8K3 +++++ cat /tmp/tmp.SWzMZqWAOg +++++ rm /tmp/tmp.FIOSDkz8K3 /tmp/tmp.SWzMZqWAOg +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XvAseyVutS +++ mktemp ++ local LAST_ERR=/tmp/tmp.CFT3x8duM1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XvAseyVutS ++ cat /tmp/tmp.CFT3x8duM1 ++ rm /tmp/tmp.XvAseyVutS /tmp/tmp.CFT3x8duM1 ++ return 0 + [[ 2 == \2 ]] + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' 
-s /tmp/tmp.MowO4SojyA/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-3.sql /tmp/tmp.MowO4SojyA/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.gSRcDtHI26 ++ mktemp + local LAST_ERR=/tmp/tmp.zv8XUNHx4M + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.gSRcDtHI26 secret/my-cluster-secrets patched + cat /tmp/tmp.zv8XUNHx4M + rm /tmp/tmp.gSRcDtHI26 /tmp/tmp.zv8XUNHx4M + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.55EyUqsd3u +++ mktemp ++ local LAST_ERR=/tmp/tmp.Psn64qkf5i ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.55EyUqsd3u ++ cat /tmp/tmp.Psn64qkf5i ++ rm /tmp/tmp.55EyUqsd3u /tmp/tmp.Psn64qkf5i ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + echo 'Skipping dual password feature doesn'\''t work for 5.7. PXC 5.7 doesn'\''t support it!' Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it! 
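The patch_secret calls traced above reduce to a single kubectl patch against the Secret's .data map, whose values are base64-encoded (dGVzdC1wYXNzd29yZA== decodes to test-password). A minimal standalone equivalent of the monitor patch, with the secret and key names taken from the trace:

    # patch_secret, reconstructed: Secret .data values are base64, so the
    # helper encodes the password before patching a single key in place.
    secret=my-cluster-secrets
    key=monitor
    value=$(echo -n 'test-password' | base64)   # dGVzdC1wYXNzd29yZA==
    kubectl patch secret "$secret" -p="{\"data\":{\"$key\": \"$value\"}}"

Once the key changes, the operator re-syncs the user's password inside the cluster, which is why the script immediately re-enters wait_cluster_consistency below and polls .status.state until it reports ready.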
+ return + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iiStaaGz0Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.O657xkB8uv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iiStaaGz0Y ++ cat /tmp/tmp.O657xkB8uv ++ rm /tmp/tmp.iiStaaGz0Y /tmp/tmp.O657xkB8uv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LWevqDH71b +++ mktemp ++ local LAST_ERR=/tmp/tmp.uNqWSOUpCk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LWevqDH71b ++ cat /tmp/tmp.uNqWSOUpCk ++ rm /tmp/tmp.LWevqDH71b /tmp/tmp.uNqWSOUpCk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iKyMklTw6F +++ mktemp ++ local LAST_ERR=/tmp/tmp.1sulF3T3yI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iKyMklTw6F ++ cat /tmp/tmp.1sulF3T3yI ++ rm /tmp/tmp.iKyMklTw6F /tmp/tmp.1sulF3T3yI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hdJIZCg6WG +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ft2DDdKGcx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hdJIZCg6WG ++ cat /tmp/tmp.Ft2DDdKGcx ++ rm /tmp/tmp.hdJIZCg6WG /tmp/tmp.Ft2DDdKGcx ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9qCqku6K5D +++ mktemp ++ local LAST_ERR=/tmp/tmp.tqEdSx9LzT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9qCqku6K5D ++ cat /tmp/tmp.tqEdSx9LzT ++ rm /tmp/tmp.9qCqku6K5D /tmp/tmp.tqEdSx9LzT ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.zWVs7tMHP0 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.6zgMC2QhGv +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i 
in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.zWVs7tMHP0 +++++ cat /tmp/tmp.6zgMC2QhGv +++++ rm /tmp/tmp.zWVs7tMHP0 /tmp/tmp.6zgMC2QhGv +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.270grmyBHV ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.1KbFIZ43oF +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.270grmyBHV +++++ cat /tmp/tmp.1KbFIZ43oF +++++ rm /tmp/tmp.270grmyBHV /tmp/tmp.1KbFIZ43oF +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dlQRDCuTvT +++ mktemp ++ local LAST_ERR=/tmp/tmp.A0d9NpPK3Q ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dlQRDCuTvT ++ cat /tmp/tmp.A0d9NpPK3Q ++ rm /tmp/tmp.dlQRDCuTvT /tmp/tmp.A0d9NpPK3Q ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PMhpNT6ApM +++ mktemp ++ local LAST_ERR=/tmp/tmp.vHI4hqObxM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PMhpNT6ApM ++ cat /tmp/tmp.vHI4hqObxM ++ rm /tmp/tmp.PMhpNT6ApM /tmp/tmp.vHI4hqObxM ++ return 0 + client_pod=pxc-client-64b479df95-qhk5k + wait_pod pxc-client-64b479df95-qhk5k + local pod=pxc-client-64b479df95-qhk5k + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-qhk5k ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-qhk5k condition met pxc-client-64b479df95-qhk5k.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.MowO4SojyA/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-4.sql /tmp/tmp.MowO4SojyA/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Q9xKXxNgmw ++ mktemp + local LAST_ERR=/tmp/tmp.vSSQsIdHDe + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Q9xKXxNgmw secret/my-cluster-secrets patched + cat /tmp/tmp.vSSQsIdHDe + rm /tmp/tmp.Q9xKXxNgmw /tmp/tmp.vSSQsIdHDe + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dCWCvG7Vta +++ mktemp ++ local LAST_ERR=/tmp/tmp.x8LtRYmrVx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dCWCvG7Vta ++ cat /tmp/tmp.x8LtRYmrVx ++ rm /tmp/tmp.dCWCvG7Vta /tmp/tmp.x8LtRYmrVx ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KeBgUlU643 +++ mktemp ++ local LAST_ERR=/tmp/tmp.GH82gfpJYz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KeBgUlU643 ++ cat /tmp/tmp.GH82gfpJYz ++ rm /tmp/tmp.KeBgUlU643 /tmp/tmp.GH82gfpJYz ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.XaFNF5iKm7 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.bdmtwwbLT3 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.XaFNF5iKm7 +++++ cat /tmp/tmp.bdmtwwbLT3 +++++ rm /tmp/tmp.XaFNF5iKm7 /tmp/tmp.bdmtwwbLT3 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.E80u1pRlq5 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.769QkGSrHT +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' 
+++++ break +++++ cat /tmp/tmp.E80u1pRlq5 +++++ cat /tmp/tmp.769QkGSrHT +++++ rm /tmp/tmp.E80u1pRlq5 /tmp/tmp.769QkGSrHT +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qbBqJvrbnc +++ mktemp ++ local LAST_ERR=/tmp/tmp.0jkKzMZ9Yj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qbBqJvrbnc ++ cat /tmp/tmp.0jkKzMZ9Yj ++ rm /tmp/tmp.qbBqJvrbnc /tmp/tmp.0jkKzMZ9Yj ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UzPpKztQHO +++ mktemp ++ local LAST_ERR=/tmp/tmp.lKCZRuEjxz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UzPpKztQHO ++ cat /tmp/tmp.lKCZRuEjxz ++ rm /tmp/tmp.UzPpKztQHO /tmp/tmp.lKCZRuEjxz ++ return 0 + client_pod=pxc-client-64b479df95-qhk5k + wait_pod pxc-client-64b479df95-qhk5k + local pod=pxc-client-64b479df95-qhk5k + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-qhk5k ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-qhk5k condition met pxc-client-64b479df95-qhk5k.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.MowO4SojyA/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-4.sql /tmp/tmp.MowO4SojyA/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.wLHsMUhma7 ++ mktemp + local LAST_ERR=/tmp/tmp.ANQJGRrzWY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.wLHsMUhma7 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.ANQJGRrzWY + rm /tmp/tmp.wLHsMUhma7 /tmp/tmp.ANQJGRrzWY + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5RamLDsq4b +++ mktemp ++ local LAST_ERR=/tmp/tmp.C83wJb0Kip ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5RamLDsq4b ++ cat /tmp/tmp.C83wJb0Kip ++ rm /tmp/tmp.5RamLDsq4b /tmp/tmp.C83wJb0Kip ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ll3AUH7vFf +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZMJe6MQDHA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ll3AUH7vFf ++ cat /tmp/tmp.ZMJe6MQDHA ++ rm /tmp/tmp.Ll3AUH7vFf /tmp/tmp.ZMJe6MQDHA ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.K9dINoCvrv +++ mktemp ++ local LAST_ERR=/tmp/tmp.EPNiqUWbyS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.K9dINoCvrv ++ cat /tmp/tmp.EPNiqUWbyS ++ rm /tmp/tmp.K9dINoCvrv /tmp/tmp.EPNiqUWbyS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AulFx9N4tr +++ mktemp ++ local LAST_ERR=/tmp/tmp.2N1LrZjBjq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break 
++ cat /tmp/tmp.AulFx9N4tr ++ cat /tmp/tmp.2N1LrZjBjq ++ rm /tmp/tmp.AulFx9N4tr /tmp/tmp.2N1LrZjBjq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.66tnDh4lrP +++ mktemp ++ local LAST_ERR=/tmp/tmp.1VVWhkle0A ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.66tnDh4lrP ++ cat /tmp/tmp.1VVWhkle0A ++ rm /tmp/tmp.66tnDh4lrP /tmp/tmp.1VVWhkle0A ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LK5kV8R5fU +++ mktemp ++ local LAST_ERR=/tmp/tmp.viwZNxeCJE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LK5kV8R5fU ++ cat /tmp/tmp.viwZNxeCJE ++ rm /tmp/tmp.LK5kV8R5fU /tmp/tmp.viwZNxeCJE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.U0zaw16NTH +++ mktemp ++ local LAST_ERR=/tmp/tmp.utJ3Kpc1k1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.U0zaw16NTH ++ cat /tmp/tmp.utJ3Kpc1k1 ++ rm /tmp/tmp.U0zaw16NTH /tmp/tmp.utJ3Kpc1k1 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7owpG1tN5r +++ mktemp ++ local LAST_ERR=/tmp/tmp.VmydxXb7ej ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7owpG1tN5r ++ cat /tmp/tmp.VmydxXb7ej ++ rm /tmp/tmp.7owpG1tN5r /tmp/tmp.VmydxXb7ej ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.7NBuOusH3W ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.v3yqhtZBVQ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.7NBuOusH3W +++++ cat /tmp/tmp.v3yqhtZBVQ +++++ rm /tmp/tmp.7NBuOusH3W /tmp/tmp.v3yqhtZBVQ +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.1mCh0d3dip ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.w6uDrVqUMs +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.1mCh0d3dip +++++ cat 
/tmp/tmp.w6uDrVqUMs +++++ rm /tmp/tmp.1mCh0d3dip /tmp/tmp.w6uDrVqUMs +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kNRprvvEI0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.2x4bfWKDKH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kNRprvvEI0 ++ cat /tmp/tmp.2x4bfWKDKH ++ rm /tmp/tmp.kNRprvvEI0 /tmp/tmp.2x4bfWKDKH ++ return 0 + [[ 2 == \2 ]] + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.pLOt9IXSz4 ++ mktemp + local LAST_ERR=/tmp/tmp.qe3liBfUyF + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.pLOt9IXSz4 secret/my-cluster-secrets-2 patched + cat /tmp/tmp.qe3liBfUyF + rm /tmp/tmp.pLOt9IXSz4 /tmp/tmp.qe3liBfUyF + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BVpd2Bxz3X +++ mktemp ++ local LAST_ERR=/tmp/tmp.zMT4U0PWlj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BVpd2Bxz3X ++ cat /tmp/tmp.zMT4U0PWlj ++ rm /tmp/tmp.BVpd2Bxz3X /tmp/tmp.zMT4U0PWlj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SzCrHh6Jvl +++ mktemp ++ local LAST_ERR=/tmp/tmp.HBxfpyawuO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SzCrHh6Jvl ++ cat /tmp/tmp.HBxfpyawuO ++ rm /tmp/tmp.SzCrHh6Jvl /tmp/tmp.HBxfpyawuO ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xg0vbObqKu +++ mktemp ++ local LAST_ERR=/tmp/tmp.2f9Ephf5Qr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ 
exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xg0vbObqKu ++ cat /tmp/tmp.2f9Ephf5Qr ++ rm /tmp/tmp.xg0vbObqKu /tmp/tmp.2f9Ephf5Qr ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.aKNALfMBIk ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.dv03t04POt +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.aKNALfMBIk +++++ cat /tmp/tmp.dv03t04POt +++++ rm /tmp/tmp.aKNALfMBIk /tmp/tmp.dv03t04POt +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.XcbLwaSeYS ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.BNSvEhrrKy +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.XcbLwaSeYS +++++ cat /tmp/tmp.BNSvEhrrKy +++++ rm /tmp/tmp.XcbLwaSeYS /tmp/tmp.BNSvEhrrKy +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wKzlqwUYZm +++ mktemp ++ local LAST_ERR=/tmp/tmp.mtcAGPUf20 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wKzlqwUYZm ++ cat /tmp/tmp.mtcAGPUf20 ++ rm /tmp/tmp.wKzlqwUYZm /tmp/tmp.mtcAGPUf20 ++ return 0 + [[ 2 == \2 ]] + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.P8smdJMfam +++ mktemp ++ local LAST_ERR=/tmp/tmp.ow7YVfSOhI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.P8smdJMfam ++ cat /tmp/tmp.ow7YVfSOhI ++ rm /tmp/tmp.P8smdJMfam /tmp/tmp.ow7YVfSOhI ++ return 0 + client_pod=pxc-client-64b479df95-qhk5k + wait_pod pxc-client-64b479df95-qhk5k + local pod=pxc-client-64b479df95-qhk5k + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-qhk5k ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-qhk5k condition met 
pxc-client-64b479df95-qhk5k.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.MowO4SojyA/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-4.sql /tmp/tmp.MowO4SojyA/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.CgYShOYReg +++ mktemp ++ local LAST_ERR=/tmp/tmp.QZDH2GEP2i ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CgYShOYReg ++ cat /tmp/tmp.QZDH2GEP2i ++ rm /tmp/tmp.CgYShOYReg /tmp/tmp.QZDH2GEP2i ++ return 0 + newpass='j{2FpAZ4,_(~s6MM_H' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''j{2FpAZ4,_(~s6MM_H'\'';' '-h some-name-pxc -uroot -p'\''j{2FpAZ4,_(~s6MM_H'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''j{2FpAZ4,_(~s6MM_H'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''j{2FpAZ4,_(~s6MM_H'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hgBolNXl9n +++ mktemp ++ local LAST_ERR=/tmp/tmp.s1nhRCGJCL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hgBolNXl9n ++ cat /tmp/tmp.s1nhRCGJCL ++ rm /tmp/tmp.hgBolNXl9n /tmp/tmp.s1nhRCGJCL ++ return 0 + client_pod=pxc-client-64b479df95-qhk5k + wait_pod pxc-client-64b479df95-qhk5k + local pod=pxc-client-64b479df95-qhk5k + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-qhk5k ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-qhk5k condition met pxc-client-64b479df95-qhk5k.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''j{2FpAZ4,_(~s6MM_H'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''j{2FpAZ4,_(~s6MM_H'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''j{2FpAZ4,_(~s6MM_H'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''j{2FpAZ4,_(~s6MM_H'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.P9DzCxDkMH +++ mktemp ++ local LAST_ERR=/tmp/tmp.24VkoCVdxZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.P9DzCxDkMH ++ cat /tmp/tmp.24VkoCVdxZ ++ rm /tmp/tmp.P9DzCxDkMH /tmp/tmp.24VkoCVdxZ ++ return 0 + 
client_pod=pxc-client-64b479df95-qhk5k + wait_pod pxc-client-64b479df95-qhk5k + local pod=pxc-client-64b479df95-qhk5k + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-qhk5k ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-qhk5k condition met pxc-client-64b479df95-qhk5k.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.MowO4SojyA/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-4.sql /tmp/tmp.MowO4SojyA/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.6IMYhFH9UI +++ mktemp ++ local LAST_ERR=/tmp/tmp.v4GePCzzEN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6IMYhFH9UI ++ cat /tmp/tmp.v4GePCzzEN ++ rm /tmp/tmp.6IMYhFH9UI /tmp/tmp.v4GePCzzEN ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.8iuV0Zgxm8 ++ mktemp + local LAST_ERR=/tmp/tmp.lTBDiyA8i8 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8iuV0Zgxm8 secret/my-cluster-secrets-2 configured + cat /tmp/tmp.lTBDiyA8i8 Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
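The kubectl warning above is benign here: my-cluster-secrets-2 was not created with kubectl apply, so it lacks the kubectl.kubernetes.io/last-applied-configuration annotation that apply uses for its three-way merge, and apply simply patches the annotation in and continues. A sketch of how the warning can be avoided when seeding the same secret (same file as in the trace, path shortened):

    # Creating the object with --save-config records the applied config,
    # so later declarative updates via kubectl apply are warning-free:
    kubectl create --save-config -f e2e-tests/users/conf/secrets.yml
    kubectl apply -f e2e-tests/users/conf/secrets.yml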
+ rm /tmp/tmp.8iuV0Zgxm8 /tmp/tmp.lTBDiyA8i8 + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wYgFSV6gbo +++ mktemp ++ local LAST_ERR=/tmp/tmp.wSWwOfvh9g ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wYgFSV6gbo ++ cat /tmp/tmp.wSWwOfvh9g ++ rm /tmp/tmp.wYgFSV6gbo /tmp/tmp.wSWwOfvh9g ++ return 0 + client_pod=pxc-client-64b479df95-qhk5k + wait_pod pxc-client-64b479df95-qhk5k + local pod=pxc-client-64b479df95-qhk5k + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-qhk5k ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-qhk5k condition met pxc-client-64b479df95-qhk5k.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.MowO4SojyA/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-4.sql /tmp/tmp.MowO4SojyA/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1708-7a028e99#' + local LAST_OUT=/tmp/tmp.eHSH0ZccAa + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-19811~ + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + local LAST_ERR=/tmp/tmp.vXhaOoSQxR + local exit_status=0 + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.eHSH0ZccAa 
perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.vXhaOoSQxR + rm /tmp/tmp.eHSH0ZccAa /tmp/tmp.vXhaOoSQxR + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XT62lpJygu +++ mktemp ++ local LAST_ERR=/tmp/tmp.Vo0MmycBYD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XT62lpJygu ++ cat /tmp/tmp.Vo0MmycBYD ++ rm /tmp/tmp.XT62lpJygu /tmp/tmp.Vo0MmycBYD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CExIraCj3z +++ mktemp ++ local LAST_ERR=/tmp/tmp.sh8LceoPMi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CExIraCj3z ++ cat /tmp/tmp.sh8LceoPMi ++ rm /tmp/tmp.CExIraCj3z /tmp/tmp.sh8LceoPMi ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oNHDtzEh0D +++ mktemp ++ local LAST_ERR=/tmp/tmp.yt5WK6fdhd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oNHDtzEh0D ++ cat /tmp/tmp.yt5WK6fdhd ++ rm /tmp/tmp.oNHDtzEh0D /tmp/tmp.yt5WK6fdhd ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CTSsBi2Qyp +++ mktemp ++ local LAST_ERR=/tmp/tmp.G0nAMSPMOl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CTSsBi2Qyp ++ cat /tmp/tmp.G0nAMSPMOl ++ rm /tmp/tmp.CTSsBi2Qyp /tmp/tmp.G0nAMSPMOl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GTAfBsPCXT +++ mktemp ++ local LAST_ERR=/tmp/tmp.134sI7cMRR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GTAfBsPCXT ++ cat /tmp/tmp.134sI7cMRR ++ rm /tmp/tmp.GTAfBsPCXT /tmp/tmp.134sI7cMRR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 
]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QDFiLFggbk +++ mktemp ++ local LAST_ERR=/tmp/tmp.g0xvbZ5Qfz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QDFiLFggbk ++ cat /tmp/tmp.g0xvbZ5Qfz ++ rm /tmp/tmp.QDFiLFggbk /tmp/tmp.g0xvbZ5Qfz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4389fSr6eb +++ mktemp ++ local LAST_ERR=/tmp/tmp.DtgI8FfHKH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4389fSr6eb ++ cat /tmp/tmp.DtgI8FfHKH ++ rm /tmp/tmp.4389fSr6eb /tmp/tmp.DtgI8FfHKH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Wi6WL3UNZq +++ mktemp ++ local LAST_ERR=/tmp/tmp.QnUBuHOzps ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Wi6WL3UNZq ++ cat /tmp/tmp.QnUBuHOzps ++ rm /tmp/tmp.Wi6WL3UNZq /tmp/tmp.QnUBuHOzps ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.i6CTWvzFZA +++ mktemp ++ local LAST_ERR=/tmp/tmp.aOidpeHCcw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.i6CTWvzFZA ++ cat /tmp/tmp.aOidpeHCcw ++ rm /tmp/tmp.i6CTWvzFZA /tmp/tmp.aOidpeHCcw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 8 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UdiVTbxqAL +++ mktemp ++ local LAST_ERR=/tmp/tmp.XseDbiLnq2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UdiVTbxqAL ++ cat /tmp/tmp.XseDbiLnq2 ++ rm /tmp/tmp.UdiVTbxqAL /tmp/tmp.XseDbiLnq2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 9 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kRwYZ2INOw +++ mktemp ++ local LAST_ERR=/tmp/tmp.iNLpMTTAto ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kRwYZ2INOw ++ cat /tmp/tmp.iNLpMTTAto ++ rm /tmp/tmp.kRwYZ2INOw /tmp/tmp.iNLpMTTAto ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' 
waiting for cluster readyness + sleep 20 + [[ 10 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.y2uA2EpG06 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZBpRrJMxdL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.y2uA2EpG06 ++ cat /tmp/tmp.ZBpRrJMxdL ++ rm /tmp/tmp.y2uA2EpG06 /tmp/tmp.ZBpRrJMxdL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 11 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1PJhu3t0yj +++ mktemp ++ local LAST_ERR=/tmp/tmp.P123boFKd6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1PJhu3t0yj ++ cat /tmp/tmp.P123boFKd6 ++ rm /tmp/tmp.1PJhu3t0yj /tmp/tmp.P123boFKd6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 12 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J8l2AVjrtg +++ mktemp ++ local LAST_ERR=/tmp/tmp.M7OFlI4g0f ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.J8l2AVjrtg ++ cat /tmp/tmp.M7OFlI4g0f ++ rm /tmp/tmp.J8l2AVjrtg /tmp/tmp.M7OFlI4g0f ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 13 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KxDHfR4ye0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.reBxJyifs7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KxDHfR4ye0 ++ cat /tmp/tmp.reBxJyifs7 ++ rm /tmp/tmp.KxDHfR4ye0 /tmp/tmp.reBxJyifs7 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SD8BgV0QFC +++ mktemp ++ local LAST_ERR=/tmp/tmp.XLjUTQTPW3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SD8BgV0QFC ++ cat /tmp/tmp.XLjUTQTPW3 ++ rm /tmp/tmp.SD8BgV0QFC /tmp/tmp.XLjUTQTPW3 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.2c9HcTJQXO ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.GXeTJk2iIU +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.2c9HcTJQXO +++++ cat /tmp/tmp.GXeTJk2iIU +++++ rm /tmp/tmp.2c9HcTJQXO /tmp/tmp.GXeTJk2iIU +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy 
++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qA2d4ZKSab +++ mktemp ++ local LAST_ERR=/tmp/tmp.NE7TBoQwFt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qA2d4ZKSab ++ cat /tmp/tmp.NE7TBoQwFt ++ rm /tmp/tmp.qA2d4ZKSab /tmp/tmp.NE7TBoQwFt ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.LInRqlRbzC ++ mktemp + local LAST_ERR=/tmp/tmp.VQaRbYbbXV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LInRqlRbzC secret/my-cluster-secrets patched + cat /tmp/tmp.VQaRbYbbXV + rm /tmp/tmp.LInRqlRbzC /tmp/tmp.VQaRbYbbXV + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8UTU5WPvqd +++ mktemp ++ local LAST_ERR=/tmp/tmp.qDGMvh1gQx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8UTU5WPvqd ++ cat /tmp/tmp.qDGMvh1gQx ++ rm /tmp/tmp.8UTU5WPvqd /tmp/tmp.qDGMvh1gQx ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pNbz8Yu217 +++ mktemp ++ local LAST_ERR=/tmp/tmp.qMWGO8k1yB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pNbz8Yu217 ++ cat /tmp/tmp.qMWGO8k1yB ++ rm /tmp/tmp.pNbz8Yu217 /tmp/tmp.qMWGO8k1yB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.U894YroQU7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.X46lPKFy48 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.U894YroQU7 ++ cat /tmp/tmp.X46lPKFy48 ++ rm /tmp/tmp.U894YroQU7 /tmp/tmp.X46lPKFy48 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp 
++ local LAST_OUT=/tmp/tmp.NpgFakJLxz
+++ mktemp
++ local LAST_ERR=/tmp/tmp.3WvUFGmzJB
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pxc some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.NpgFakJLxz
++ cat /tmp/tmp.3WvUFGmzJB
++ rm /tmp/tmp.NpgFakJLxz /tmp/tmp.3WvUFGmzJB
++ return 0
+ [[ ready == \r\e\a\d\y ]]
++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.8iElnTfJIq
+++ mktemp
++ local LAST_ERR=/tmp/tmp.yXGQk25NyQ
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.8iElnTfJIq
++ cat /tmp/tmp.yXGQk25NyQ
++ rm /tmp/tmp.8iElnTfJIq /tmp/tmp.yXGQk25NyQ
++ return 0
+ [[ 3 == \3 ]]
+++ get_proxy_engine some-name
+++ local cluster_name=some-name
++++ get_proxy some-name
++++ local target_cluster=some-name
+++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}'
++++++ mktemp
+++++ local LAST_OUT=/tmp/tmp.fRzaS4SZc0
++++++ mktemp
+++++ local LAST_ERR=/tmp/tmp.wgwIgiUH5g
+++++ local exit_status=0
++++++ seq 0 2
+++++ for i in '$(seq 0 2)'
+++++ set +e
+++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}'
+++++ exit_status=0
+++++ set -e
+++++ '[' 0 '!=' 0 ']'
+++++ break
+++++ cat /tmp/tmp.fRzaS4SZc0
+++++ cat /tmp/tmp.wgwIgiUH5g
+++++ rm /tmp/tmp.fRzaS4SZc0 /tmp/tmp.wgwIgiUH5g
+++++ return 0
++++ [[ true == \t\r\u\e ]]
++++ echo some-name-haproxy
++++ return
+++ local cluster_proxy=some-name-haproxy
+++ echo haproxy
++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.v05ZQAW2we
+++ mktemp
++ local LAST_ERR=/tmp/tmp.5jwKjYbyzr
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.v05ZQAW2we
++ cat /tmp/tmp.5jwKjYbyzr
++ rm /tmp/tmp.v05ZQAW2we /tmp/tmp.5jwKjYbyzr
++ return 0
+ [[ 3 == \3 ]]
+ compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\'''
+ local command_id=select-3
+ local 'command=SHOW DATABASES;'
+ local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\'''
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-3.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]]
+ run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\'''
+ local 'command=SHOW DATABASES;'
+ local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\'''
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.KoFlaBRgia
+++ mktemp
++ local LAST_ERR=/tmp/tmp.K99Gn2Lzac
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.KoFlaBRgia
++ cat /tmp/tmp.K99Gn2Lzac
++ rm /tmp/tmp.KoFlaBRgia /tmp/tmp.K99Gn2Lzac
++ return 0
+ client_pod=pxc-client-64b479df95-qhk5k
+ wait_pod pxc-client-64b479df95-qhk5k
+ local pod=pxc-client-64b479df95-qhk5k
+ local max_retry=480
+ local ns=
++ echo pxc-client-64b479df95-qhk5k
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-64b479df95-qhk5k condition met
pxc-client-64b479df95-qhk5k.Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.MowO4SojyA/select-3.sql ']'
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1708/e2e-tests/users/compare/select-3.sql /tmp/tmp.MowO4SojyA/select-3.sql
+ destroy users-19811
+ local namespace=users-19811
+ local ignore_logs=true
+ desc 'destroy cluster/operator and all other resources'
+ set +o xtrace
-----------------------------------------------------------------------------------
destroy cluster/operator and all other resources
-----------------------------------------------------------------------------------
+ '[' true == false -o 1 == 1 ']'
+ grep -v level=info
+ grep -v 'the object has been modified'
+ /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g'
++ get_operator_pod
++ local label_prefix=app.kubernetes.io/
+ grep -v 'get backup status: Job.batch'
+ sort -u
+ tee /tmp/tmp.MowO4SojyA/operator.log
+++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator
+++ grep -c percona-xtradb-cluster-operator
++ local check_label=1
++ [[ 1 -eq 0 ]]
++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
+++ mktemp
++ local LAST_OUT=/tmp/tmp.rIyeimJwQW
+++ mktemp
++ local LAST_ERR=/tmp/tmp.HoSaSMPmyR
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.rIyeimJwQW
++ cat /tmp/tmp.HoSaSMPmyR
++ rm /tmp/tmp.rIyeimJwQW /tmp/tmp.HoSaSMPmyR
++ return 0
+ kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-5b5db6b897-s6q7g
++ mktemp
+ local LAST_OUT=/tmp/tmp.GVsRJ0bZJt
++ mktemp
+ local LAST_ERR=/tmp/tmp.ZYmSJ7Tb7o
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl logs -n pxc-operator percona-xtradb-cluster-operator-5b5db6b897-s6q7g
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.GVsRJ0bZJt
+ cat /tmp/tmp.ZYmSJ7Tb7o
+ rm /tmp/tmp.GVsRJ0bZJt /tmp/tmp.ZYmSJ7Tb7o
+ return 0
2024-05-14T10:53:02.620Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1300000"}
2024-05-14T10:53:02.621Z INFO setup Manager starting up {"gitCommit": "7a028e99532396cdf8e71c835c422bd8d903460c", "gitBranch": "PR-1708-7a028e99", "buildTime": "2024-05-14T08:49:43Z", "goVersion": "go1.22.3", "os": "linux", "arch": "amd64"}
2024-05-14T10:53:02.622Z INFO setup Registering Components.
2024-05-14T10:53:06.630Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"}
2024-05-14T10:53:06.712Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false}
2024-05-14T10:53:06.712Z INFO controller-runtime.metrics Starting metrics server
2024-05-14T10:53:06.712Z INFO controller-runtime.webhook Starting webhook server
2024-05-14T10:53:06.712Z INFO setup Starting the Cmd.
2024-05-14T10:53:06.712Z INFO starting server {"name": "health probe", "addr": "[::]:8081"}
2024-05-14T10:53:06.713Z INFO controller-runtime.certwatcher Starting certificate watcher
2024-05-14T10:53:06.713Z INFO controller-runtime.certwatcher Updated current TLS certificate
2024-05-14T10:53:06.713Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443}
2024-05-14T10:53:06.813Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com...
2024-05-14T10:53:06.827Z DEBUG events percona-xtradb-cluster-operator-5b5db6b897-s6q7g_56e94103-0cfa-478d-a740-8fb0eeffee5c became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"67e91589-280e-433d-b682-08a783b080d2","apiVersion":"coordination.k8s.io/v1","resourceVersion":"66268"}, "reason": "LeaderElection"}
2024-05-14T10:53:06.827Z INFO Starting Controller {"controller": "pxcbackup-controller"}
2024-05-14T10:53:06.827Z INFO Starting Controller {"controller": "pxc-controller"}
2024-05-14T10:53:06.827Z INFO Starting Controller {"controller": "pxcrestore-controller"}
2024-05-14T10:53:06.827Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: unknown type"}
2024-05-14T10:53:06.827Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: unknown type"}
2024-05-14T10:53:06.827Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: unknown type"}
2024-05-14T10:53:06.827Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com
2024-05-14T10:53:07.038Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1}
2024-05-14T10:53:07.038Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1}
2024-05-14T10:53:07.038Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1}
2024-05-14T10:53:36.933Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0d355ae3-dfbe-4c03-bb7e-1dceaf15447a", "version": "1.15.0"}
2024-05-14T10:54:56.509Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "ccff24df-2a97-4e9e-8e0b-aee39469c6ad", "user": "operator"}
2024-05-14T10:54:56.550Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "ccff24df-2a97-4e9e-8e0b-aee39469c6ad", "user": "monitor"}
2024-05-14T10:54:56.600Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "ccff24df-2a97-4e9e-8e0b-aee39469c6ad"}
2024-05-14T10:54:56.654Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "ccff24df-2a97-4e9e-8e0b-aee39469c6ad", "user": "xtrabackup"}
2024-05-14T10:54:56.697Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "ccff24df-2a97-4e9e-8e0b-aee39469c6ad"}
2024-05-14T10:54:56.802Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "ccff24df-2a97-4e9e-8e0b-aee39469c6ad", "err": "get primary pxc pod: not found"}
2024-05-14T10:55:01.540Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "69946fd8-82c8-4071-9fc9-4d50becc009b", "err": "get primary pxc pod: not found"}
2024-05-14T10:55:06.757Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "6c8ab684-dbaf-4b24-b4ec-b8ced86bc0af", "err": "get primary pxc pod: not found"}
2024-05-14T10:55:12.091Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "bc3be0d4-3c03-4414-bee8-eea8753b5445", "err": "get primary pxc pod: not found"}
2024-05-14T10:57:23.673Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "c32e4df4-b379-41b9-886d-9eac33caedde", "user": "root"}
2024-05-14T10:57:23.716Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "c32e4df4-b379-41b9-886d-9eac33caedde", "user": "replication"}
2024-05-14T10:57:23.857Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "c32e4df4-b379-41b9-886d-9eac33caedde", "new version": "5.7.44-48-57"}
2024-05-14T10:57:27.357Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "c32e4df4-b379-41b9-886d-9eac33caedde"}
2024-05-14T10:57:32.026Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "35fc6e1e-b6b2-4d14-b1c2-d6f86e1fc330"}
2024-05-14T10:57:37.644Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "bce2be8c-269c-449e-9195-736c381ecda5"}
2024-05-14T10:57:43.129Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "19749a83-8da5-489b-bdbf-979f78d523d7"}
2024-05-14T10:57:48.437Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "fc5c304b-b6ba-4e47-b4e8-3a90c373c503"}
2024-05-14T10:57:53.539Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "041c5280-31b4-436c-9ba7-93c187f03d84"}
2024-05-14T10:57:58.772Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "713e8814-ddbf-4281-b9e6-c3b81fcc13e9"}
2024-05-14T10:58:04.360Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "785648e3-c560-4138-90e5-62b41b91ab6f"}
2024-05-14T10:58:09.671Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "bcb59a1c-ae19-43f4-83b5-220c51294925"}
2024-05-14T10:58:14.628Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "4830e85e-f0d6-4fe2-a39b-1c922babc448"}
2024-05-14T10:58:19.822Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "af15c540-6403-4f39-88af-aeb3570b0466"}
2024-05-14T10:58:25.050Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "e9dab01a-9afc-4e39-9d00-25b96c0d7054"}
2024-05-14T10:58:30.325Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "3c7cd990-90cb-4be8-abc4-c60a6fd5a3f1"}
2024-05-14T10:58:32.107Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "c56c5ae6-907e-443a-8d2b-b2f42f1bde66", "user": "root"}
2024-05-14T10:58:32.145Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "c56c5ae6-907e-443a-8d2b-b2f42f1bde66", "user": "root"}
2024-05-14T10:58:32.153Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "c56c5ae6-907e-443a-8d2b-b2f42f1bde66", "secret": "some-name-mysql-init", "user": "root"}
2024-05-14T10:58:37.900Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "c56c5ae6-907e-443a-8d2b-b2f42f1bde66"}
2024-05-14T10:58:37.909Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "c56c5ae6-907e-443a-8d2b-b2f42f1bde66", "user": "root"}
2024-05-14T10:58:41.729Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "c56c5ae6-907e-443a-8d2b-b2f42f1bde66"}
2024-05-14T10:58:46.559Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "4521331d-67f9-4aa4-9fb5-a7cd863375f1"}
2024-05-14T10:59:09.044Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "e7545096-e03f-4758-afa1-c9c1f2746699", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-05-14T10:59:13.860Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "590c3045-3183-4570-992b-cec771b9a277", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-05-14T10:59:15.138Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "3a459fd6-742a-4af4-a7fd-88e868c18523", "user": "proxyadmin"}
2024-05-14T10:59:15.138Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "3a459fd6-742a-4af4-a7fd-88e868c18523", "user": "proxyadmin"}
2024-05-14T10:59:15.215Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "3a459fd6-742a-4af4-a7fd-88e868c18523", "user": "proxyadmin"}
2024-05-14T10:59:15.224Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "3a459fd6-742a-4af4-a7fd-88e868c18523", "user": "proxyadmin"}
2024-05-14T10:59:15.224Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "3a459fd6-742a-4af4-a7fd-88e868c18523", "last-applied-secret": "1a55867d0f4fa0d0643653b2eb57b2f690315d8d843a10599c9839c2a779a95f"}
2024-05-14T10:59:15.373Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "3a459fd6-742a-4af4-a7fd-88e868c18523", "err": "get primary pxc pod: not found"}
2024-05-14T10:59:15.453Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "3a459fd6-742a-4af4-a7fd-88e868c18523", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-05-14T10:59:59.044Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "e49a2bb4-524c-40af-a573-d4ee8a7feba4"}
2024-05-14T11:00:09.932Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "95bf47af-d4a7-48cd-9983-77ba04ab26de"}
2024-05-14T11:00:20.236Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0a038f82-bdf1-4d58-9774-209a51c94fa8"}
2024-05-14T11:00:27.994Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "566d7df4-645b-4c73-bf64-cd6c64e37e89", "user": "xtrabackup"}
2024-05-14T11:00:28.032Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "566d7df4-645b-4c73-bf64-cd6c64e37e89", "user": "xtrabackup"}
2024-05-14T11:00:28.044Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "566d7df4-645b-4c73-bf64-cd6c64e37e89", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2024-05-14T11:00:28.052Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "566d7df4-645b-4c73-bf64-cd6c64e37e89", "user": "xtrabackup"}
2024-05-14T11:00:28.052Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "566d7df4-645b-4c73-bf64-cd6c64e37e89", "last-applied-secret": "d11bcc70ca53ea410cd455e13d82e7225917b0861d9b677ed34c02c5f6fbaf1b"}
2024-05-14T11:00:33.492Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "566d7df4-645b-4c73-bf64-cd6c64e37e89"}
2024-05-14T11:02:08.093Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "f930a9d0-feea-4475-a03c-9c22f2ddbb1e", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-19811 on 10.54.112.10:53: no such host"}
2024-05-14T11:02:54.338Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "5f0d15d1-df43-4902-b7b1-901d33f4a36f"}
2024-05-14T11:02:59.023Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "4820cb8a-b94e-440e-835c-b82c38e25cce"}
2024-05-14T11:03:04.284Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "67a88973-fbfb-462f-9e2c-4944fbd8c4b0"}
2024-05-14T11:03:09.449Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "6421dcd2-f63c-4c54-a42c-03c65758a8f3"}
2024-05-14T11:03:11.335Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0194bb29-d7de-4412-a694-6c3b091993f9", "user": "monitor"}
2024-05-14T11:03:11.358Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0194bb29-d7de-4412-a694-6c3b091993f9", "user": "monitor"}
2024-05-14T11:03:11.368Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0194bb29-d7de-4412-a694-6c3b091993f9", "secret": "some-name-mysql-init", "user": "monitor"}
2024-05-14T11:03:11.414Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0194bb29-d7de-4412-a694-6c3b091993f9", "user": "monitor"}
2024-05-14T11:03:11.423Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0194bb29-d7de-4412-a694-6c3b091993f9", "user": "monitor"}
2024-05-14T11:03:11.423Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0194bb29-d7de-4412-a694-6c3b091993f9", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"}
2024-05-14T11:03:14.206Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0194bb29-d7de-4412-a694-6c3b091993f9", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-05-14T11:04:18.814Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "07c0b800-041e-467c-8417-6430c2bf2646"}
2024-05-14T11:04:23.746Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "a70b0578-c2b3-4dcb-826f-c291d830eb92"}
2024-05-14T11:04:29.160Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "4682bfd3-0621-4dec-b689-69d6c2ccd045"}
2024-05-14T11:04:34.248Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "9fd065e3-6c14-42d8-bab2-fdd59e259fd4"}
2024-05-14T11:04:36.166Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "a10833b8-a2ed-4993-b94a-0004996e3f97", "user": "operator"}
2024-05-14T11:04:36.193Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "a10833b8-a2ed-4993-b94a-0004996e3f97", "user": "operator"}
2024-05-14T11:04:36.204Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "a10833b8-a2ed-4993-b94a-0004996e3f97", "secret": "some-name-mysql-init", "user": "operator"}
2024-05-14T11:04:36.216Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "a10833b8-a2ed-4993-b94a-0004996e3f97", "user": "operator"}
2024-05-14T11:04:36.216Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "a10833b8-a2ed-4993-b94a-0004996e3f97", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"}
2024-05-14T11:04:37.649Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "a10833b8-a2ed-4993-b94a-0004996e3f97", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-19811.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-19811.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-19811.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-19811.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-19811.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-19811.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-05-14T11:05:00.268Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "643f9c01-41e3-4e93-a8d0-d5c1b6e7f1c7"}
2024-05-14T11:05:14.918Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "secrets": "my-cluster-secrets-2"}
2024-05-14T11:05:14.918Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "user": "root"}
2024-05-14T11:05:14.960Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "user": "root"}
2024-05-14T11:05:14.970Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "secret": "some-name-mysql-init", "user": "root"}
2024-05-14T11:05:16.597Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "efd3a91a-eb12-4b1a-9a23-ef64e5299867"}
2024-05-14T11:05:19.620Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d"}
2024-05-14T11:05:19.630Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "user": "root"}
2024-05-14T11:05:19.630Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "user": "operator"}
2024-05-14T11:05:19.657Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "user": "operator"}
2024-05-14T11:05:19.669Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "secret": "some-name-mysql-init", "user": "operator"}
2024-05-14T11:05:19.684Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "user": "operator"}
2024-05-14T11:05:19.684Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "user": "monitor"}
2024-05-14T11:05:19.709Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "user": "monitor"}
2024-05-14T11:05:19.723Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "secret": "some-name-mysql-init", "user": "monitor"}
2024-05-14T11:05:19.769Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "user": "monitor"}
2024-05-14T11:05:19.782Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "user": "monitor"}
2024-05-14T11:05:19.782Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "user": "xtrabackup"}
2024-05-14T11:05:19.805Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "user": "xtrabackup"}
2024-05-14T11:05:19.815Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2024-05-14T11:05:19.827Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "user": "xtrabackup"}
2024-05-14T11:05:19.827Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "user": "replication"}
2024-05-14T11:05:19.853Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "user": "replication"}
2024-05-14T11:05:19.862Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "secret": "some-name-mysql-init", "user": "replication"}
2024-05-14T11:05:19.872Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "user": "replication"}
2024-05-14T11:05:19.872Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "user": "proxyadmin"}
2024-05-14T11:05:19.918Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "user": "proxyadmin"}
2024-05-14T11:05:19.931Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "user": "proxyadmin"}
2024-05-14T11:05:19.931Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "last-applied-secret": "b389e08da101fb7aebe2634b0b7a70e981cf38f71c3aac8916da6103a0fbfec8"}
2024-05-14T11:05:19.931Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "last-applied-secret": "b389e08da101fb7aebe2634b0b7a70e981cf38f71c3aac8916da6103a0fbfec8"}
2024-05-14T11:05:20.225Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "969a672e-8a5e-4731-b29f-7e741f5f826d", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-05-14T11:06:58.209Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "e5218565-98f9-485b-9cd1-030ea9beac47", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-19811 on 10.54.112.10:53: no such host"}
2024-05-14T11:07:08.685Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "da467f11-a7e1-44d4-8dec-e1d6bf363e4c", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.236.0.71:33062: connect: connection refused"}
2024-05-14T11:07:13.866Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "fe650d31-9097-4e27-92a6-5865a2be911d", "primary name": "some-name-pxc-0.some-name-pxc.users-19811.svc.cluster.local"}
2024-05-14T11:07:19.688Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "e719b1a7-c379-4c17-b205-47d566b1f178", "primary name": "some-name-pxc-0.some-name-pxc.users-19811.svc.cluster.local"}
2024-05-14T11:07:24.964Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "313833c2-5cd6-47d0-ace9-cb9d5ccbcc8f", "primary name": "some-name-pxc-0.some-name-pxc.users-19811.svc.cluster.local"}
2024-05-14T11:07:30.174Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "77c125a7-9837-4307-be48-cd9ee4358789", "primary name": "some-name-pxc-0.some-name-pxc.users-19811.svc.cluster.local"}
2024-05-14T11:07:35.490Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "ae9e365a-43ae-4f90-8af5-120d88b17fcc", "primary name": "some-name-pxc-0.some-name-pxc.users-19811.svc.cluster.local"}
2024-05-14T11:07:40.787Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "ca2d9c5b-4841-4daa-9bbe-25f42d07ad3f", "primary name": "some-name-pxc-0.some-name-pxc.users-19811.svc.cluster.local"}
2024-05-14T11:07:49.754Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "f3acabe5-c998-4fc9-a561-23c36f44bef3"}
2024-05-14T11:07:54.942Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "07413992-1cb6-484a-8b6e-8ca57044d259"}
2024-05-14T11:07:59.932Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "2d7d6fd8-c71d-4294-a7ec-5d69a2d02c91"}
2024-05-14T11:08:01.748Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "b473fa52-e135-487c-8ca7-b3ce269853ea", "user": "operator"}
2024-05-14T11:08:01.773Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "b473fa52-e135-487c-8ca7-b3ce269853ea", "user": "operator"}
2024-05-14T11:08:01.784Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "b473fa52-e135-487c-8ca7-b3ce269853ea", "secret": "some-name-mysql-init", "user": "operator"}
2024-05-14T11:08:01.793Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "b473fa52-e135-487c-8ca7-b3ce269853ea", "user": "operator"}
2024-05-14T11:08:01.793Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "b473fa52-e135-487c-8ca7-b3ce269853ea", "last-applied-secret": "deaabd035e1c897f790a71c41b14e3ccdbfb082df652d71e639dd87aa5521b84"}
2024-05-14T11:08:03.148Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "b473fa52-e135-487c-8ca7-b3ce269853ea", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-19811.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-19811.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-19811.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-19811.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-19811.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-19811.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-05-14T11:08:44.986Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "3450e34c-8e9d-42cf-9b77-2b6b51c68263"}
2024-05-14T11:08:53.344Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "d526b630-9fc6-4b63-a23c-09a036d73096"}
2024-05-14T11:08:58.749Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "026ddf96-42d5-4f8c-80cf-44714c8bb99f"}
2024-05-14T11:09:04.297Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "4ff3dece-5131-42c3-82c7-d1f1eafbe9d0"}
2024-05-14T11:09:09.445Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "659b25a9-8b94-40e0-983f-d43346a3eb6d"}
2024-05-14T11:09:15.981Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "c3ce6f62-0b70-4bf2-9a6b-3d6a26c812aa"}
2024-05-14T11:09:19.735Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "a2c8e454-f2b5-4b7a-92b0-587bdfd45d9e"}
2024-05-14T11:09:25.023Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "ab29e755-a08f-4bc8-912d-2f0c462ae5e6"}
2024-05-14T11:09:30.256Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "d648b235-b2a4-4d27-9731-7d714502409e"}
2024-05-14T11:09:35.526Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "d0ba3aa6-3df4-478a-8dae-570f1b65bfb7"}
2024-05-14T11:09:40.764Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "903ff7fa-c82a-4e2f-875b-fa0b2ac6e694"}
2024-05-14T11:09:46.257Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "4ab1e4df-1fc5-47f6-9966-eeb5fbe89323"}
2024-05-14T11:09:51.448Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "30335631-5d4c-4877-9d08-30514efeb594"}
2024-05-14T11:09:56.839Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0a9fb226-e06c-4908-b916-780dca6d320a"}
2024-05-14T11:09:58.443Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "655468b5-f7e6-4a6f-97a4-ff9c82edffd6", "user": "root"}
2024-05-14T11:09:58.512Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "655468b5-f7e6-4a6f-97a4-ff9c82edffd6", "user": "root"}
2024-05-14T11:09:58.523Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "655468b5-f7e6-4a6f-97a4-ff9c82edffd6", "secret": "some-name-mysql-init", "user": "root"}
2024-05-14T11:10:03.967Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "655468b5-f7e6-4a6f-97a4-ff9c82edffd6"}
2024-05-14T11:10:04.233Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "655468b5-f7e6-4a6f-97a4-ff9c82edffd6", "user": "root"}
2024-05-14T11:10:04.233Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "655468b5-f7e6-4a6f-97a4-ff9c82edffd6", "user": "monitor"}
2024-05-14T11:10:04.263Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "655468b5-f7e6-4a6f-97a4-ff9c82edffd6", "user": "monitor"}
2024-05-14T11:10:04.302Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "655468b5-f7e6-4a6f-97a4-ff9c82edffd6", "secret": "some-name-mysql-init", "user": "monitor"}
2024-05-14T11:10:04.348Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "655468b5-f7e6-4a6f-97a4-ff9c82edffd6", "user": "monitor"}
2024-05-14T11:10:04.359Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "655468b5-f7e6-4a6f-97a4-ff9c82edffd6", "user": "monitor"}
2024-05-14T11:10:04.359Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "655468b5-f7e6-4a6f-97a4-ff9c82edffd6", "user": "xtrabackup"}
2024-05-14T11:10:04.385Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "655468b5-f7e6-4a6f-97a4-ff9c82edffd6", "user": "xtrabackup"}
2024-05-14T11:10:04.395Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "655468b5-f7e6-4a6f-97a4-ff9c82edffd6", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2024-05-14T11:10:04.432Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "655468b5-f7e6-4a6f-97a4-ff9c82edffd6", "user": "xtrabackup"}
2024-05-14T11:10:04.432Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "655468b5-f7e6-4a6f-97a4-ff9c82edffd6", "user": "proxyadmin"}
2024-05-14T11:10:04.479Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "655468b5-f7e6-4a6f-97a4-ff9c82edffd6", "user": "proxyadmin"}
2024-05-14T11:10:04.494Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "655468b5-f7e6-4a6f-97a4-ff9c82edffd6", "user": "proxyadmin"}
2024-05-14T11:10:04.494Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "655468b5-f7e6-4a6f-97a4-ff9c82edffd6", "last-applied-secret": "8386673dc4d6e371be0a1b3780912c1c7a2cb6f1436fd2a09ff08bfecc1c0287"}
2024-05-14T11:10:04.494Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "655468b5-f7e6-4a6f-97a4-ff9c82edffd6", "last-applied-secret": "8386673dc4d6e371be0a1b3780912c1c7a2cb6f1436fd2a09ff08bfecc1c0287"}
2024-05-14T11:10:04.810Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "655468b5-f7e6-4a6f-97a4-ff9c82edffd6", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-05-14T11:11:51.298Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "ea84d72e-871f-4132-bb7c-867a2a9bdf1b", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-19811 on 10.54.112.10:53: no such host"}
2024-05-14T11:11:51.902Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "94636da9-d960-4339-9c82-989721b1dca4", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-19811 on 10.54.112.10:53: no such host"}
2024-05-14T11:11:56.631Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "1118df82-c856-4465-843a-c31f0ecb2b59", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-19811 on 10.54.112.10:53: no such host"}
2024-05-14T11:12:01.903Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "cee4c588-8b56-4f7e-9109-88ea77668343", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-19811 on 10.54.112.10:53: no such host"}
2024-05-14T11:12:07.116Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "7ba94d6e-d92c-4418-b55c-9ce11ae9197f", "primary name": "some-name-pxc-0.some-name-pxc.users-19811.svc.cluster.local"}
2024-05-14T11:12:12.419Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "81cbed7a-6a0b-48ea-b72f-d1c320c78f1f", "primary name": "some-name-pxc-0.some-name-pxc.users-19811.svc.cluster.local"}
2024-05-14T11:12:32.089Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: 6a89f750-50ed-47c9-a623-3a4ec5a1c76b
2024-05-14T11:12:35.438Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "1c9d03e2-1c8c-4379-8ce3-4ee165155321", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 10.54.119.54:3306: connect: connection refused"}
2024-05-14T11:15:19.846Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0e232d70-bc32-46b7-b412-467609191f1f", "user": "root"}
2024-05-14T11:15:19.884Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0e232d70-bc32-46b7-b412-467609191f1f", "user": "root"}
2024-05-14T11:15:19.926Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0e232d70-bc32-46b7-b412-467609191f1f", "secret": "some-name-mysql-init", "user": "root"}
2024-05-14T11:15:19.940Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0e232d70-bc32-46b7-b412-467609191f1f", "user": "root"}
2024-05-14T11:15:19.940Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0e232d70-bc32-46b7-b412-467609191f1f", "user": "operator"}
2024-05-14T11:15:19.968Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0e232d70-bc32-46b7-b412-467609191f1f", "user": "operator"}
2024-05-14T11:15:19.995Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0e232d70-bc32-46b7-b412-467609191f1f", "secret": "some-name-mysql-init", "user": "operator"}
2024-05-14T11:15:20.014Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0e232d70-bc32-46b7-b412-467609191f1f", "user": "operator"}
2024-05-14T11:15:20.014Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0e232d70-bc32-46b7-b412-467609191f1f", "user": "monitor"}
2024-05-14T11:15:20.039Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0e232d70-bc32-46b7-b412-467609191f1f", "user": "monitor"}
2024-05-14T11:15:20.051Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0e232d70-bc32-46b7-b412-467609191f1f", "secret": "some-name-mysql-init", "user": "monitor"}
2024-05-14T11:15:20.076Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0e232d70-bc32-46b7-b412-467609191f1f", "user": "monitor"}
2024-05-14T11:15:20.076Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0e232d70-bc32-46b7-b412-467609191f1f", "user": "xtrabackup"}
2024-05-14T11:15:20.096Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0e232d70-bc32-46b7-b412-467609191f1f", "user": "xtrabackup"}
2024-05-14T11:15:20.108Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0e232d70-bc32-46b7-b412-467609191f1f", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2024-05-14T11:15:20.124Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0e232d70-bc32-46b7-b412-467609191f1f", "user": "xtrabackup"}
2024-05-14T11:15:20.124Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0e232d70-bc32-46b7-b412-467609191f1f", "user": "replication"}
2024-05-14T11:15:20.147Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0e232d70-bc32-46b7-b412-467609191f1f", "user": "replication"}
2024-05-14T11:15:20.176Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0e232d70-bc32-46b7-b412-467609191f1f", "secret": "some-name-mysql-init", "user": "replication"}
2024-05-14T11:15:20.201Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0e232d70-bc32-46b7-b412-467609191f1f", "user": "replication"}
2024-05-14T11:15:20.201Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0e232d70-bc32-46b7-b412-467609191f1f", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"}
2024-05-14T11:15:20.201Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "0e232d70-bc32-46b7-b412-467609191f1f", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"}
2024-05-14T11:17:51.759Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "597da5bf-5737-445c-af4e-9bd7a0b0ca73", "user": "monitor"}
2024-05-14T11:17:51.784Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "597da5bf-5737-445c-af4e-9bd7a0b0ca73", "user": "monitor"}
2024-05-14T11:17:51.795Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "597da5bf-5737-445c-af4e-9bd7a0b0ca73", "secret": "some-name-mysql-init", "user": "monitor"}
2024-05-14T11:17:51.811Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "597da5bf-5737-445c-af4e-9bd7a0b0ca73", "user": "monitor"}
2024-05-14T11:17:51.811Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-19811", "name": "some-name", "reconcileID": "597da5bf-5737-445c-af4e-9bd7a0b0ca73", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"}
github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1
	/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1248
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler
	/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:324
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem
	/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:261
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2
	/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:222
+ kubectl get pxc --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl patch pxc -n users-19811 some-name --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.lS2zRsoQAb
++ mktemp
+ local LAST_ERR=/tmp/tmp.iwNXLJ4isD
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.lS2zRsoQAb
perconaxtradbcluster.pxc.percona.com "some-name" deleted
+ cat /tmp/tmp.iwNXLJ4isD
+ rm /tmp/tmp.lS2zRsoQAb /tmp/tmp.iwNXLJ4isD
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.gL8o8JC2Cf
++ mktemp
+ local LAST_ERR=/tmp/tmp.iWkOipR5YS
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.gL8o8JC2Cf
No resources found
+ cat /tmp/tmp.iWkOipR5YS
+ rm /tmp/tmp.gL8o8JC2Cf /tmp/tmp.iWkOipR5YS
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.W81kXTqCAc
++ mktemp
+ local LAST_ERR=/tmp/tmp.9M6Xn3jhev
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.W81kXTqCAc
No resources found
+ cat /tmp/tmp.9M6Xn3jhev
+ rm /tmp/tmp.W81kXTqCAc /tmp/tmp.9M6Xn3jhev
+ return 0
+ kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
++ mktemp
+ local LAST_OUT=/tmp/tmp.1RaLDwCOWz
++ mktemp
+ local LAST_ERR=/tmp/tmp.rRsikubEV3
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.1RaLDwCOWz
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted
+ cat /tmp/tmp.rRsikubEV3
+ rm /tmp/tmp.1RaLDwCOWz /tmp/tmp.rRsikubEV3
+ return 0
+ kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml
+ :
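The repeating mktemp/seq/set +e pattern throughout this teardown is the trace of the harness's kubectl_bin wrapper, which retries kubectl up to three times while capturing output to temp files. A reconstruction sketch based only on the trace above; the real helper in e2e-tests/functions may differ, e.g. in its retry delay:

    # Sketch of kubectl_bin as implied by the xtrace output.
    kubectl_bin() {
        local LAST_OUT; LAST_OUT=$(mktemp)
        local LAST_ERR; LAST_ERR=$(mktemp)
        local exit_status=0
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" != 0 ]; then
                sleep 1   # assumption: the actual backoff is not visible in the trace
            else
                break     # matches the "+ '[' 0 '!=' 0 ']'" / "+ break" lines above
            fi
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }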
+ '[' '!' -z '' ']'
+ '[' -n pxc-operator ']'
+ kubectl_bin delete --grace-period=0 --force=true namespace users-19811
+ rm -rf /tmp/tmp.MowO4SojyA
+ kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator
++ mktemp
++ mktemp
+ local LAST_OUT=/tmp/tmp.dwKVTDUQVu
+ desc 'test passed'
+ set +o xtrace
-----------------------------------------------------------------------------------
test passed
-----------------------------------------------------------------------------------
++ mktemp
+ local LAST_OUT=/tmp/tmp.Q61Bq9jASR
+ local LAST_ERR=/tmp/tmp.78YEAPVhD8
+ local exit_status=0
++ mktemp
++ seq 0 2
+ local LAST_ERR=/tmp/tmp.CNY8IkTt0g
+ local exit_status=0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace users-19811
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace pxc-operator
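The interleaved trace at the end (two mktemp/seq sequences mixed together) indicates the two namespace deletions run concurrently. A sketch of that teardown pattern; the backgrounding and final wait are assumptions, since the log ends before the deletions return:

    # Hypothetical parallel teardown matching the interleaved trace.
    kubectl delete --grace-period=0 --force=true namespace users-19811 &
    kubectl delete --grace-period=0 --force=true namespace pxc-operator &
    wait   # assumption: block until both force-deletes complete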