Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/logs/users-5-7.log WARNING: version difference between client (1.30) and server (1.27) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.27) exceeds the supported minor version skew of +/-1 + create_infra users-10631 + local ns=users-10631 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE + kubectl patch pxc -n users-4005 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.SXXDxC5bhA ++ mktemp + local LAST_ERR=/tmp/tmp.7Rkq8iBsid + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SXXDxC5bhA perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.7Rkq8iBsid + rm /tmp/tmp.SXXDxC5bhA /tmp/tmp.7Rkq8iBsid + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.OWA8b6Wdlg ++ mktemp + local LAST_ERR=/tmp/tmp.Xz9z01wI5w + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.OWA8b6Wdlg No resources found + cat /tmp/tmp.Xz9z01wI5w + rm /tmp/tmp.OWA8b6Wdlg /tmp/tmp.Xz9z01wI5w + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.ApBTlHKbNg ++ mktemp + local LAST_ERR=/tmp/tmp.vgsvvoCHSM + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ApBTlHKbNg No resources found + cat /tmp/tmp.vgsvvoCHSM + rm /tmp/tmp.ApBTlHKbNg /tmp/tmp.vgsvvoCHSM + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep validate-auth ++ awk '{print $1}' ++ kubectl get ValidatingWebhookConfiguration + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified 
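
Editor's note: the repeated "error: resource(s) were provided, but no name was specified" lines above are expected, not failures: destroy_chaos_mesh pipes an empty grep result into "timeout 30 kubectl delete ..." and masks the nonzero exit with ":". More broadly, nearly every kubectl call in this trace runs through the suite's kubectl_bin retry wrapper, which is why each invocation expands into the same mktemp/seq/cat/rm boilerplate. The sketch below is a minimal reconstruction inferred from the visible xtrace output, not the verbatim helper from e2e-tests/functions; the retry count (three attempts) and the temp-file handling match the trace, while the failure-path branching is simplified.

# Minimal sketch of the kubectl_bin retry wrapper, reconstructed from this trace.
kubectl_bin() {
	local LAST_OUT LAST_ERR exit_status=0
	LAST_OUT=$(mktemp)
	LAST_ERR=$(mktemp)
	for i in $(seq 0 2); do                      # up to three attempts
		set +e
		kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
		exit_status=$?
		set -e
		if [ $exit_status != 0 ]; then
			sleep 0                      # the trace shows no real backoff between tries
		else
			break                        # success: stop retrying
		fi
	done
	cat "$LAST_OUT"                              # replay captured stdout
	cat "$LAST_ERR"                              # replay captured stderr
	rm -f "$LAST_OUT" "$LAST_ERR"
	return $exit_status
}

The finalizer-clearing step at the top of create_infra, taken directly from the trace, explains the initial patch of the leftover users-4005 cluster: every PerconaXtraDBCluster in every namespace gets its finalizers emptied so the subsequent "kubectl delete pxc --all" cannot hang on a stuck resource:

kubectl get pxc --all-namespaces -o wide | grep -v NAMESPACE \
	| xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
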
+ : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + xargs kubectl delete ns + kubectl_bin delete namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.JLx57FOj3F ++ mktemp + local LAST_ERR=/tmp/tmp.8YPKgNyU5j + local exit_status=0 ++ mktemp ++ seq 0 2 + local LAST_OUT=/tmp/tmp.AcvPTSIJI0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator ++ mktemp + local LAST_ERR=/tmp/tmp.TFNFqRlxhQ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.AcvPTSIJI0 + cat /tmp/tmp.TFNFqRlxhQ + rm /tmp/tmp.AcvPTSIJI0 /tmp/tmp.TFNFqRlxhQ + return 0 namespace "gmp-public" deleted namespace "gmp-system" deleted namespace "users-4005" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.JLx57FOj3F namespace "pxc-operator" deleted + cat /tmp/tmp.8YPKgNyU5j + rm /tmp/tmp.JLx57FOj3F /tmp/tmp.8YPKgNyU5j + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.f9Hqz7FIxC ++ mktemp + local LAST_ERR=/tmp/tmp.GMA0OVaft1 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.f9Hqz7FIxC namespace/pxc-operator created + cat /tmp/tmp.GMA0OVaft1 + rm /tmp/tmp.f9Hqz7FIxC /tmp/tmp.GMA0OVaft1 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.xx71mxYnaF +++ mktemp ++ local LAST_ERR=/tmp/tmp.t3jz5UzXol ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xx71mxYnaF ++ cat /tmp/tmp.t3jz5UzXol ++ rm /tmp/tmp.xx71mxYnaF /tmp/tmp.t3jz5UzXol ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1785-7e3ddd30-4-cluster8 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.dSn8KiQeuv ++ mktemp + local LAST_ERR=/tmp/tmp.C9c0JMCE3a + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1785-7e3ddd30-4-cluster8 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dSn8KiQeuv Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1785-7e3ddd30-4-cluster8" modified. 
+ cat /tmp/tmp.C9c0JMCE3a + rm /tmp/tmp.dSn8KiQeuv /tmp/tmp.C9c0JMCE3a + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.TeLsKaLmHh ++ mktemp + local LAST_ERR=/tmp/tmp.uTa8n0kuPZ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.TeLsKaLmHh customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.uTa8n0kuPZ + rm /tmp/tmp.TeLsKaLmHh /tmp/tmp.uTa8n0kuPZ + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.0X3RQTXJWn ++ mktemp + local LAST_ERR=/tmp/tmp.qmV1G74WeP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0X3RQTXJWn clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.qmV1G74WeP + rm /tmp/tmp.0X3RQTXJWn /tmp/tmp.qmV1G74WeP + return 0 + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/deploy/cw-operator.yaml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.8FcDTD0JZ8 + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1785-7e3ddd30^' ++ mktemp + local LAST_ERR=/tmp/tmp.wojDyj8Ute + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8FcDTD0JZ8 deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.wojDyj8Ute + rm /tmp/tmp.8FcDTD0JZ8 /tmp/tmp.wojDyj8Ute + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.czx263bNf8 ++ mktemp + local LAST_ERR=/tmp/tmp.qaUsPYSWNa + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.czx263bNf8 pod/percona-xtradb-cluster-operator-66cc8ffb95-hj7s5 condition met + cat /tmp/tmp.qaUsPYSWNa + rm /tmp/tmp.czx263bNf8 /tmp/tmp.qaUsPYSWNa + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.MSc6dhAZ55 +++ mktemp ++ local LAST_ERR=/tmp/tmp.azCiYrvS1c ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MSc6dhAZ55 ++ cat /tmp/tmp.azCiYrvS1c ++ rm /tmp/tmp.MSc6dhAZ55 /tmp/tmp.azCiYrvS1c ++ return 0 + wait_pod percona-xtradb-cluster-operator-66cc8ffb95-hj7s5 480 pxc-operator + local pod=percona-xtradb-cluster-operator-66cc8ffb95-hj7s5 + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-66cc8ffb95-hj7s5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-66cc8ffb95-hj7s5 condition met percona-xtradb-cluster-operator-66cc8ffb95-hj7s5.Ok + sleep 3 + create_namespace users-10631 + local namespace=users-10631 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ grep chaos-mesh.org ++ awk '{print $1}' ++ kubectl get crd + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v 
'^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces users-10631' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-10631 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-10631 + kubectl_bin get ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.liVaILkgwi + local LAST_OUT=/tmp/tmp.eVJHWO35sc ++ mktemp ++ mktemp + xargs kubectl delete ns + local LAST_ERR=/tmp/tmp.Z1a9FVNnTH + local LAST_ERR=/tmp/tmp.C52uHTv4nU + local exit_status=0 + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + for i in '$(seq 0 2)' + set +e + set +e + kubectl delete namespace users-10631 + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.liVaILkgwi + cat /tmp/tmp.C52uHTv4nU + rm /tmp/tmp.liVaILkgwi /tmp/tmp.C52uHTv4nU + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-10631 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-10631 namespace "gmp-public" deleted namespace "gmp-system" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.eVJHWO35sc + cat /tmp/tmp.Z1a9FVNnTH Error from server (NotFound): namespaces "users-10631" not found + rm /tmp/tmp.eVJHWO35sc /tmp/tmp.Z1a9FVNnTH + return 1 + : + wait_for_delete namespace/users-10631 + local res=namespace/users-10631 + echo -n 'namespace/users-10631 - ' namespace/users-10631 - + set +o xtrace Error from server (NotFound): namespaces "users-10631" not found + desc 'create namespace users-10631' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-10631 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-10631 ++ mktemp + local LAST_OUT=/tmp/tmp.4gJpjh22Fm ++ mktemp + local LAST_ERR=/tmp/tmp.0lIrRbN9BG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-10631 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.4gJpjh22Fm namespace/users-10631 created + cat /tmp/tmp.0lIrRbN9BG + rm /tmp/tmp.4gJpjh22Fm /tmp/tmp.0lIrRbN9BG + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZhWMYxtITl +++ mktemp ++ local LAST_ERR=/tmp/tmp.UrJI6DIxv2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZhWMYxtITl ++ cat /tmp/tmp.UrJI6DIxv2 ++ rm /tmp/tmp.ZhWMYxtITl /tmp/tmp.UrJI6DIxv2 ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1785-7e3ddd30-4-cluster8 --namespace=users-10631 ++ mktemp + local LAST_OUT=/tmp/tmp.spE0ZRwau2 ++ mktemp + local LAST_ERR=/tmp/tmp.d8ZLv5j58K + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1785-7e3ddd30-4-cluster8 --namespace=users-10631 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.spE0ZRwau2 Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1785-7e3ddd30-4-cluster8" 
modified. + cat /tmp/tmp.d8ZLv5j58K + rm /tmp/tmp.spE0ZRwau2 /tmp/tmp.d8ZLv5j58K + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.2iD8pWQRlr ++ mktemp + local LAST_ERR=/tmp/tmp.nhygBfcDRF + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2iD8pWQRlr secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.nhygBfcDRF + rm /tmp/tmp.2iD8pWQRlr /tmp/tmp.nhygBfcDRF + return 0 + desc 'create PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.QY8tKpAM80 ++ mktemp + local LAST_ERR=/tmp/tmp.DNZBC1apVy + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QY8tKpAM80 secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.DNZBC1apVy + rm /tmp/tmp.QY8tKpAM80 /tmp/tmp.DNZBC1apVy + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1785-7e3ddd30#' + /usr/bin/sed -e 's#image:.*-pmm$#image: 
perconalab/pmm-client:dev-latest#' ++ mktemp + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-10631~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_OUT=/tmp/tmp.O9BNLL0IMd + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ mktemp + local LAST_ERR=/tmp/tmp.933HrIJhlE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.O9BNLL0IMd deployment.apps/pxc-client created + cat /tmp/tmp.933HrIJhlE + rm /tmp/tmp.O9BNLL0IMd /tmp/tmp.933HrIJhlE + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + local LAST_OUT=/tmp/tmp.ccLFirZ4e8 + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ mktemp + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-10631~ + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + local LAST_ERR=/tmp/tmp.u4SOeghYmp + local exit_status=0 + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1785-7e3ddd30#' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + /usr/bin/sed -e 's#apply:.*#apply: Never#' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ccLFirZ4e8 perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.u4SOeghYmp + rm /tmp/tmp.ccLFirZ4e8 /tmp/tmp.u4SOeghYmp + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.bfa4eSmfUJ ++++ mktemp +++ local LAST_ERR=/tmp/tmp.CuNrVnHWmC +++ local exit_status=0 ++++ seq 0 2 +++ 
for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.bfa4eSmfUJ +++ cat /tmp/tmp.CuNrVnHWmC +++ rm /tmp/tmp.bfa4eSmfUJ /tmp/tmp.CuNrVnHWmC +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.boVBeR1tT6 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.WqRRPXuRko +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.boVBeR1tT6 +++ cat /tmp/tmp.WqRRPXuRko +++ rm /tmp/tmp.boVBeR1tT6 /tmp/tmp.WqRRPXuRko +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-10631 ++ mktemp + local LAST_OUT=/tmp/tmp.KrZMFHTWDQ ++ mktemp + local LAST_ERR=/tmp/tmp.yCLsPVIjlm + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-10631 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-10631 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-10631 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.KrZMFHTWDQ + cat /tmp/tmp.yCLsPVIjlm error: no matching resources found + rm /tmp/tmp.KrZMFHTWDQ /tmp/tmp.yCLsPVIjlm + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met 
some-name-pxc-0.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OE6Csz7Jme +++ mktemp ++ local LAST_ERR=/tmp/tmp.fs44c17qqf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OE6Csz7Jme ++ cat /tmp/tmp.fs44c17qqf Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.OE6Csz7Jme /tmp/tmp.fs44c17qqf ++ return 0 + [[ -n '' ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8DlFFJfl37 +++ mktemp ++ local LAST_ERR=/tmp/tmp.PDFEb0gyZH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8DlFFJfl37 ++ cat /tmp/tmp.PDFEb0gyZH ++ rm /tmp/tmp.8DlFFJfl37 /tmp/tmp.PDFEb0gyZH ++ return 0 + client_pod=pxc-client-7b7f8bcff9-4b69t + wait_pod pxc-client-7b7f8bcff9-4b69t + local pod=pxc-client-7b7f8bcff9-4b69t + local max_retry=480 + local ns= ++ echo pxc-client-7b7f8bcff9-4b69t ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-7b7f8bcff9-4b69t condition met pxc-client-7b7f8bcff9-4b69t.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5lTZgc7EEO +++ mktemp ++ local LAST_ERR=/tmp/tmp.UfRqBWjUEq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ 
set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5lTZgc7EEO ++ cat /tmp/tmp.UfRqBWjUEq ++ rm /tmp/tmp.5lTZgc7EEO /tmp/tmp.UfRqBWjUEq ++ return 0 + client_pod=pxc-client-7b7f8bcff9-4b69t + wait_pod pxc-client-7b7f8bcff9-4b69t + local pod=pxc-client-7b7f8bcff9-4b69t + local max_retry=480 + local ns= ++ echo pxc-client-7b7f8bcff9-4b69t ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-7b7f8bcff9-4b69t condition met pxc-client-7b7f8bcff9-4b69t.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.07rcLJAkDi +++ mktemp ++ local LAST_ERR=/tmp/tmp.8qZGXq8IoW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.07rcLJAkDi ++ cat /tmp/tmp.8qZGXq8IoW ++ rm /tmp/tmp.07rcLJAkDi /tmp/tmp.8qZGXq8IoW ++ return 0 + client_pod=pxc-client-7b7f8bcff9-4b69t + wait_pod pxc-client-7b7f8bcff9-4b69t + local pod=pxc-client-7b7f8bcff9-4b69t + local max_retry=480 + local ns= ++ echo pxc-client-7b7f8bcff9-4b69t ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-7b7f8bcff9-4b69t condition met pxc-client-7b7f8bcff9-4b69t.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.RJRIosOSHb/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-1.sql /tmp/tmp.RJRIosOSHb/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.O8VnGKDKnl +++ mktemp ++ local LAST_ERR=/tmp/tmp.jpMa2BksCb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.O8VnGKDKnl ++ cat /tmp/tmp.jpMa2BksCb ++ rm /tmp/tmp.O8VnGKDKnl /tmp/tmp.jpMa2BksCb ++ return 0 + client_pod=pxc-client-7b7f8bcff9-4b69t + wait_pod pxc-client-7b7f8bcff9-4b69t + local pod=pxc-client-7b7f8bcff9-4b69t + local max_retry=480 + local ns= ++ echo pxc-client-7b7f8bcff9-4b69t ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-7b7f8bcff9-4b69t condition met pxc-client-7b7f8bcff9-4b69t.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.RJRIosOSHb/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-1.sql /tmp/tmp.RJRIosOSHb/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eMDYgjSliX +++ mktemp ++ local LAST_ERR=/tmp/tmp.SqJ79Cxica ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eMDYgjSliX ++ cat /tmp/tmp.SqJ79Cxica ++ rm /tmp/tmp.eMDYgjSliX /tmp/tmp.SqJ79Cxica ++ return 0 + client_pod=pxc-client-7b7f8bcff9-4b69t + wait_pod pxc-client-7b7f8bcff9-4b69t + local pod=pxc-client-7b7f8bcff9-4b69t + local max_retry=480 + local ns= ++ echo pxc-client-7b7f8bcff9-4b69t ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-7b7f8bcff9-4b69t condition met pxc-client-7b7f8bcff9-4b69t.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.RJRIosOSHb/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-1.sql /tmp/tmp.RJRIosOSHb/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ egrep -o 'early-plugin-load=keyring_\w+.so' ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SeVLrfTs3F +++ mktemp ++ local LAST_ERR=/tmp/tmp.H6cQQYa1Zc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SeVLrfTs3F ++ cat /tmp/tmp.H6cQQYa1Zc Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.SeVLrfTs3F /tmp/tmp.H6cQQYa1Zc ++ return 0 + '[' '' ']' + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.39fyxOlwRd ++ mktemp + local LAST_ERR=/tmp/tmp.VfttOPKjKA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.39fyxOlwRd secret/my-cluster-secrets patched + cat /tmp/tmp.VfttOPKjKA + rm /tmp/tmp.39fyxOlwRd /tmp/tmp.VfttOPKjKA + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XgLJkh8T1o +++ mktemp ++ local LAST_ERR=/tmp/tmp.HaDlWUSJZs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XgLJkh8T1o ++ cat /tmp/tmp.HaDlWUSJZs ++ rm /tmp/tmp.XgLJkh8T1o /tmp/tmp.HaDlWUSJZs ++ return 0 + client_pod=pxc-client-7b7f8bcff9-4b69t + wait_pod pxc-client-7b7f8bcff9-4b69t + local pod=pxc-client-7b7f8bcff9-4b69t + local max_retry=480 + local ns= ++ echo pxc-client-7b7f8bcff9-4b69t ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-7b7f8bcff9-4b69t condition met pxc-client-7b7f8bcff9-4b69t.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.RJRIosOSHb/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-4.sql /tmp/tmp.RJRIosOSHb/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.UkN128c6Rf ++ mktemp + local LAST_ERR=/tmp/tmp.cqmHMQvoeu + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.UkN128c6Rf perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.cqmHMQvoeu + rm /tmp/tmp.UkN128c6Rf /tmp/tmp.cqmHMQvoeu + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ddQtYb9iRr +++ mktemp ++ local LAST_ERR=/tmp/tmp.UkJKdGfYWU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ddQtYb9iRr ++ cat /tmp/tmp.UkJKdGfYWU ++ rm /tmp/tmp.ddQtYb9iRr /tmp/tmp.UkJKdGfYWU ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.U2ybVs1dNT +++ mktemp ++ local LAST_ERR=/tmp/tmp.GxfruHFOUP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.U2ybVs1dNT ++ cat /tmp/tmp.GxfruHFOUP ++ rm /tmp/tmp.U2ybVs1dNT /tmp/tmp.GxfruHFOUP ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.viLaHM3aIz ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.8EYxYmzlMI +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.viLaHM3aIz +++++ cat /tmp/tmp.8EYxYmzlMI +++++ rm /tmp/tmp.viLaHM3aIz /tmp/tmp.8EYxYmzlMI +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.8T1rN5OLe1 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ko5LpNz5Fw +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.8T1rN5OLe1 +++++ cat /tmp/tmp.ko5LpNz5Fw +++++ rm /tmp/tmp.8T1rN5OLe1 /tmp/tmp.ko5LpNz5Fw +++++ return 0 ++++ [[ true == 
\t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Qsk56NH0Au +++ mktemp ++ local LAST_ERR=/tmp/tmp.wqCyT8Os5N ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Qsk56NH0Au ++ cat /tmp/tmp.wqCyT8Os5N ++ rm /tmp/tmp.Qsk56NH0Au /tmp/tmp.wqCyT8Os5N ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.H84EIXEzWo ++ mktemp + local LAST_ERR=/tmp/tmp.FqlQw7xq8r + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.H84EIXEzWo secret/my-cluster-secrets patched + cat /tmp/tmp.FqlQw7xq8r + rm /tmp/tmp.H84EIXEzWo /tmp/tmp.FqlQw7xq8r + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0GYGzbCkaZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.SeefstfqFo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0GYGzbCkaZ ++ cat /tmp/tmp.SeefstfqFo ++ rm /tmp/tmp.0GYGzbCkaZ /tmp/tmp.SeefstfqFo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iQWoeEUpZ9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.WTeWN8PKGJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iQWoeEUpZ9 ++ cat /tmp/tmp.WTeWN8PKGJ ++ rm /tmp/tmp.iQWoeEUpZ9 /tmp/tmp.WTeWN8PKGJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5RmpF7SR8o +++ mktemp ++ local LAST_ERR=/tmp/tmp.Hq0OOnWTJx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5RmpF7SR8o ++ cat /tmp/tmp.Hq0OOnWTJx ++ rm /tmp/tmp.5RmpF7SR8o /tmp/tmp.Hq0OOnWTJx ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2gk5PDy20N +++ mktemp ++ 
local LAST_ERR=/tmp/tmp.tkIBbsAfQr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2gk5PDy20N ++ cat /tmp/tmp.tkIBbsAfQr ++ rm /tmp/tmp.2gk5PDy20N /tmp/tmp.tkIBbsAfQr ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.bRo6oHFpVe ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.h6oJ4THYfw +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.bRo6oHFpVe +++++ cat /tmp/tmp.h6oJ4THYfw +++++ rm /tmp/tmp.bRo6oHFpVe /tmp/tmp.h6oJ4THYfw +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.XqZKmL1mZ2 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.6Hsjqq4o4U +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.XqZKmL1mZ2 +++++ cat /tmp/tmp.6Hsjqq4o4U +++++ rm /tmp/tmp.XqZKmL1mZ2 /tmp/tmp.6Hsjqq4o4U +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2uV8Wk6zvu +++ mktemp ++ local LAST_ERR=/tmp/tmp.RbXv17W8jR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2uV8Wk6zvu ++ cat /tmp/tmp.RbXv17W8jR ++ rm /tmp/tmp.2uV8Wk6zvu /tmp/tmp.RbXv17W8jR ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.RJRIosOSHb/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-2.sql /tmp/tmp.RJRIosOSHb/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.RJRIosOSHb/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-2.sql /tmp/tmp.RJRIosOSHb/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.RJRIosOSHb/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-2.sql /tmp/tmp.RJRIosOSHb/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.AgWcfiRaF3 ++ mktemp + local LAST_ERR=/tmp/tmp.Jv9Pi8Mcpf + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.AgWcfiRaF3 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.Jv9Pi8Mcpf + rm /tmp/tmp.AgWcfiRaF3 /tmp/tmp.Jv9Pi8Mcpf + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.tIPnayn5J5 ++ mktemp + local LAST_ERR=/tmp/tmp.l73Z4Lpzss + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.tIPnayn5J5 secret/my-cluster-secrets patched + cat /tmp/tmp.l73Z4Lpzss + rm /tmp/tmp.tIPnayn5J5 /tmp/tmp.l73Z4Lpzss + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.g2o1NTLmaK +++ mktemp ++ local LAST_ERR=/tmp/tmp.2iBZ52MokV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.g2o1NTLmaK ++ cat /tmp/tmp.2iBZ52MokV ++ rm /tmp/tmp.g2o1NTLmaK /tmp/tmp.2iBZ52MokV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KFSLTB57sM +++ mktemp ++ local LAST_ERR=/tmp/tmp.Wjo0ASKq22 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KFSLTB57sM ++ cat /tmp/tmp.Wjo0ASKq22 ++ rm /tmp/tmp.KFSLTB57sM /tmp/tmp.Wjo0ASKq22 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zZLT3psem8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.IAKlF9v1JH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 
'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zZLT3psem8 ++ cat /tmp/tmp.IAKlF9v1JH ++ rm /tmp/tmp.zZLT3psem8 /tmp/tmp.IAKlF9v1JH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ABwnIEkxge +++ mktemp ++ local LAST_ERR=/tmp/tmp.oOVVuXrMvn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ABwnIEkxge ++ cat /tmp/tmp.oOVVuXrMvn ++ rm /tmp/tmp.ABwnIEkxge /tmp/tmp.oOVVuXrMvn ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.scMCRJ8N2x +++ mktemp ++ local LAST_ERR=/tmp/tmp.NTLEGL30hI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.scMCRJ8N2x ++ cat /tmp/tmp.NTLEGL30hI ++ rm /tmp/tmp.scMCRJ8N2x /tmp/tmp.NTLEGL30hI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3760QFOrxn +++ mktemp ++ local LAST_ERR=/tmp/tmp.9w62xRdaMv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3760QFOrxn ++ cat /tmp/tmp.9w62xRdaMv ++ rm /tmp/tmp.3760QFOrxn /tmp/tmp.9w62xRdaMv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bmKfNoRV3z +++ mktemp ++ local LAST_ERR=/tmp/tmp.532ayrFIZx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bmKfNoRV3z ++ cat /tmp/tmp.532ayrFIZx ++ rm /tmp/tmp.bmKfNoRV3z /tmp/tmp.532ayrFIZx ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cEWJOvuQdM +++ mktemp ++ local LAST_ERR=/tmp/tmp.e39mntyc7r ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cEWJOvuQdM ++ cat /tmp/tmp.e39mntyc7r ++ rm /tmp/tmp.cEWJOvuQdM /tmp/tmp.e39mntyc7r ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Rnw5DEptqe +++ mktemp ++ local LAST_ERR=/tmp/tmp.4H3J2aFbcp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e 
++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Rnw5DEptqe ++ cat /tmp/tmp.4H3J2aFbcp ++ rm /tmp/tmp.Rnw5DEptqe /tmp/tmp.4H3J2aFbcp ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.awBEw1pRr6 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.EWDICULoqQ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.awBEw1pRr6 +++++ cat /tmp/tmp.EWDICULoqQ +++++ rm /tmp/tmp.awBEw1pRr6 /tmp/tmp.EWDICULoqQ +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.nXyV4aNFIn ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.yMiokR2ibO +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.nXyV4aNFIn +++++ cat /tmp/tmp.yMiokR2ibO +++++ rm /tmp/tmp.nXyV4aNFIn /tmp/tmp.yMiokR2ibO +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AsNivlemVm +++ mktemp ++ local LAST_ERR=/tmp/tmp.aK4kcgJ1p6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AsNivlemVm ++ cat /tmp/tmp.aK4kcgJ1p6 ++ rm /tmp/tmp.AsNivlemVm /tmp/tmp.aK4kcgJ1p6 ++ return 0 + [[ 2 == \2 ]] + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' 
-s /tmp/tmp.RJRIosOSHb/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-3.sql /tmp/tmp.RJRIosOSHb/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.V19pXnS8zw ++ mktemp + local LAST_ERR=/tmp/tmp.R7BWtZQSfV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.V19pXnS8zw secret/my-cluster-secrets patched + cat /tmp/tmp.R7BWtZQSfV + rm /tmp/tmp.V19pXnS8zw /tmp/tmp.R7BWtZQSfV + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.7wbhc0zimC +++ mktemp ++ local LAST_ERR=/tmp/tmp.M0fgcw9Mbi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7wbhc0zimC ++ cat /tmp/tmp.M0fgcw9Mbi ++ rm /tmp/tmp.7wbhc0zimC /tmp/tmp.M0fgcw9Mbi ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + echo 'Skipping dual password feature doesn'\''t work for 5.7. PXC 5.7 doesn'\''t support it!' Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it! 
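
Each "wait cluster consistency" block in this log is the same polling helper re-entered after every secret change. Reconstructed from the values visible in the trace (initial sleep 7, a probe every 20 seconds, at most max=36 probes, jsonpath queries against .status.state and the per-component ready counts), the loop is roughly the sketch below; anything beyond what the trace itself shows is an assumption, not the verbatim e2e helper:

wait_cluster_consistency() {
    local cluster_name=$1 cluster_size=$2 proxy_size=$3
    local i=0 max=36
    sleep 7
    # poll .status.state until the operator reports "ready"
    # (echo message spelled exactly as in the trace)
    while [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.state}') != "ready" ]]; do
        echo 'waiting for cluster readyness'
        sleep 20
        [[ $i -ge $max ]] && return 1   # assumed failure path; this trace never reaches it
        let i+=1
    done
    # once "ready", the trace also compares the reported ready counts; the real helper
    # first resolves proxysql vs haproxy via get_proxy_engine, simplified here to proxysql
    [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.pxc.ready}') == "$cluster_size" ]] &&
    [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.proxysql.ready}') == "$proxy_size" ]]
}
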
+ return + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zbSKUC0xop +++ mktemp ++ local LAST_ERR=/tmp/tmp.Wzs3RD2v25 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zbSKUC0xop ++ cat /tmp/tmp.Wzs3RD2v25 ++ rm /tmp/tmp.zbSKUC0xop /tmp/tmp.Wzs3RD2v25 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XdFFbA1wus +++ mktemp ++ local LAST_ERR=/tmp/tmp.SrbybTactC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XdFFbA1wus ++ cat /tmp/tmp.SrbybTactC ++ rm /tmp/tmp.XdFFbA1wus /tmp/tmp.SrbybTactC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gG3RFdCuIU +++ mktemp ++ local LAST_ERR=/tmp/tmp.bloR0E2jLL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gG3RFdCuIU ++ cat /tmp/tmp.bloR0E2jLL ++ rm /tmp/tmp.gG3RFdCuIU /tmp/tmp.bloR0E2jLL ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tiNm2g7Eiu +++ mktemp ++ local LAST_ERR=/tmp/tmp.1smoofmKZY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tiNm2g7Eiu ++ cat /tmp/tmp.1smoofmKZY ++ rm /tmp/tmp.tiNm2g7Eiu /tmp/tmp.1smoofmKZY ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.sFU1S96PeM ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.LTVkzmPogG +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.sFU1S96PeM +++++ cat /tmp/tmp.LTVkzmPogG +++++ rm /tmp/tmp.sFU1S96PeM /tmp/tmp.LTVkzmPogG +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.LfuvxJRRGd ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.GEraaUnS7i +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get 
pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.LfuvxJRRGd +++++ cat /tmp/tmp.GEraaUnS7i +++++ rm /tmp/tmp.LfuvxJRRGd /tmp/tmp.GEraaUnS7i +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ucc0JbiCKn +++ mktemp ++ local LAST_ERR=/tmp/tmp.mFddItDIrp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ucc0JbiCKn ++ cat /tmp/tmp.mFddItDIrp ++ rm /tmp/tmp.ucc0JbiCKn /tmp/tmp.mFddItDIrp ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OQmGzQfeOj +++ mktemp ++ local LAST_ERR=/tmp/tmp.WNaSQQk7iI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OQmGzQfeOj ++ cat /tmp/tmp.WNaSQQk7iI ++ rm /tmp/tmp.OQmGzQfeOj /tmp/tmp.WNaSQQk7iI ++ return 0 + client_pod=pxc-client-7b7f8bcff9-4b69t + wait_pod pxc-client-7b7f8bcff9-4b69t + local pod=pxc-client-7b7f8bcff9-4b69t + local max_retry=480 + local ns= ++ echo pxc-client-7b7f8bcff9-4b69t ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-7b7f8bcff9-4b69t condition met pxc-client-7b7f8bcff9-4b69t.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.RJRIosOSHb/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-4.sql /tmp/tmp.RJRIosOSHb/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.IBdUfOP3Um ++ mktemp + local LAST_ERR=/tmp/tmp.OPHNB6IMS9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.IBdUfOP3Um secret/my-cluster-secrets patched + cat /tmp/tmp.OPHNB6IMS9 + rm /tmp/tmp.IBdUfOP3Um /tmp/tmp.OPHNB6IMS9 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9PTJ9pJKIh +++ mktemp ++ local LAST_ERR=/tmp/tmp.phG46KVrrV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9PTJ9pJKIh ++ cat /tmp/tmp.phG46KVrrV ++ rm /tmp/tmp.9PTJ9pJKIh /tmp/tmp.phG46KVrrV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IWbpgSZ8Na +++ mktemp ++ local LAST_ERR=/tmp/tmp.1kkZAUjzjm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IWbpgSZ8Na ++ cat /tmp/tmp.1kkZAUjzjm ++ rm /tmp/tmp.IWbpgSZ8Na /tmp/tmp.1kkZAUjzjm ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BDg6GHm87u +++ mktemp ++ local LAST_ERR=/tmp/tmp.VlGaEmKvdL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BDg6GHm87u ++ cat /tmp/tmp.VlGaEmKvdL ++ rm /tmp/tmp.BDg6GHm87u /tmp/tmp.VlGaEmKvdL ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.H12rSgpaJA ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.W430yZMYRS +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ 
exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.H12rSgpaJA +++++ cat /tmp/tmp.W430yZMYRS +++++ rm /tmp/tmp.H12rSgpaJA /tmp/tmp.W430yZMYRS +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.lIwysnF8Dk ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.P1PBnhHmS4 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.lIwysnF8Dk +++++ cat /tmp/tmp.P1PBnhHmS4 +++++ rm /tmp/tmp.lIwysnF8Dk /tmp/tmp.P1PBnhHmS4 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iOI4duRuwa +++ mktemp ++ local LAST_ERR=/tmp/tmp.vtYYVQIXkw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iOI4duRuwa ++ cat /tmp/tmp.vtYYVQIXkw ++ rm /tmp/tmp.iOI4duRuwa /tmp/tmp.vtYYVQIXkw ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eq8Zlw1PVM +++ mktemp ++ local LAST_ERR=/tmp/tmp.HosjJuhtnc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eq8Zlw1PVM ++ cat /tmp/tmp.HosjJuhtnc ++ rm /tmp/tmp.eq8Zlw1PVM /tmp/tmp.HosjJuhtnc ++ return 0 + client_pod=pxc-client-7b7f8bcff9-4b69t + wait_pod pxc-client-7b7f8bcff9-4b69t + local pod=pxc-client-7b7f8bcff9-4b69t + local max_retry=480 + local ns= ++ echo pxc-client-7b7f8bcff9-4b69t ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-7b7f8bcff9-4b69t condition met pxc-client-7b7f8bcff9-4b69t.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.RJRIosOSHb/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-4.sql /tmp/tmp.RJRIosOSHb/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.c6qnvEKZdZ ++ mktemp + local LAST_ERR=/tmp/tmp.5cX5cZDLR5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.c6qnvEKZdZ perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.5cX5cZDLR5 + rm /tmp/tmp.c6qnvEKZdZ /tmp/tmp.5cX5cZDLR5 + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nMLD9wg0Vd +++ mktemp ++ local LAST_ERR=/tmp/tmp.xrklnGIOB8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nMLD9wg0Vd ++ cat /tmp/tmp.xrklnGIOB8 ++ rm /tmp/tmp.nMLD9wg0Vd /tmp/tmp.xrklnGIOB8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.S6HCiHnHws +++ mktemp ++ local LAST_ERR=/tmp/tmp.7J89i2sQT8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.S6HCiHnHws ++ cat /tmp/tmp.7J89i2sQT8 ++ rm /tmp/tmp.S6HCiHnHws /tmp/tmp.7J89i2sQT8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Yfq2Wnzgl7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.DiylArpIZu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Yfq2Wnzgl7 ++ cat /tmp/tmp.DiylArpIZu ++ rm /tmp/tmp.Yfq2Wnzgl7 /tmp/tmp.DiylArpIZu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7v35Z6WlnU +++ mktemp ++ local LAST_ERR=/tmp/tmp.Gr3lPTQwjM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break 
++ cat /tmp/tmp.7v35Z6WlnU ++ cat /tmp/tmp.Gr3lPTQwjM ++ rm /tmp/tmp.7v35Z6WlnU /tmp/tmp.Gr3lPTQwjM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Oh7SSl0Omp +++ mktemp ++ local LAST_ERR=/tmp/tmp.0wP9ehUctV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Oh7SSl0Omp ++ cat /tmp/tmp.0wP9ehUctV ++ rm /tmp/tmp.Oh7SSl0Omp /tmp/tmp.0wP9ehUctV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OcDtdNDLsD +++ mktemp ++ local LAST_ERR=/tmp/tmp.G5diB3xVyT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OcDtdNDLsD ++ cat /tmp/tmp.G5diB3xVyT ++ rm /tmp/tmp.OcDtdNDLsD /tmp/tmp.G5diB3xVyT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.S4oeMLR7Pi +++ mktemp ++ local LAST_ERR=/tmp/tmp.Roov6sB7jx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.S4oeMLR7Pi ++ cat /tmp/tmp.Roov6sB7jx ++ rm /tmp/tmp.S4oeMLR7Pi /tmp/tmp.Roov6sB7jx ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xGGh65vo42 +++ mktemp ++ local LAST_ERR=/tmp/tmp.q7Tg1hTpYW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xGGh65vo42 ++ cat /tmp/tmp.q7Tg1hTpYW ++ rm /tmp/tmp.xGGh65vo42 /tmp/tmp.q7Tg1hTpYW ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.yQepXyAnkI ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.5ijBosKC9p +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.yQepXyAnkI +++++ cat /tmp/tmp.5ijBosKC9p +++++ rm /tmp/tmp.yQepXyAnkI /tmp/tmp.5ijBosKC9p +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.74gNVD72bP ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Wb2C7kqvrV +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.74gNVD72bP +++++ cat 
/tmp/tmp.Wb2C7kqvrV +++++ rm /tmp/tmp.74gNVD72bP /tmp/tmp.Wb2C7kqvrV +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.p4VjmamOqY +++ mktemp ++ local LAST_ERR=/tmp/tmp.IwZLgbTxAn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.p4VjmamOqY ++ cat /tmp/tmp.IwZLgbTxAn ++ rm /tmp/tmp.p4VjmamOqY /tmp/tmp.IwZLgbTxAn ++ return 0 + [[ 2 == \2 ]] + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.kChpn3erYY ++ mktemp + local LAST_ERR=/tmp/tmp.trrV4hWpwg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.kChpn3erYY secret/my-cluster-secrets-2 patched + cat /tmp/tmp.trrV4hWpwg + rm /tmp/tmp.kChpn3erYY /tmp/tmp.trrV4hWpwg + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w9WjMfITHN +++ mktemp ++ local LAST_ERR=/tmp/tmp.p0Co3rJarG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.w9WjMfITHN ++ cat /tmp/tmp.p0Co3rJarG ++ rm /tmp/tmp.w9WjMfITHN /tmp/tmp.p0Co3rJarG ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DEPzfDIN7J +++ mktemp ++ local LAST_ERR=/tmp/tmp.KlQTQB3vDe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DEPzfDIN7J ++ cat /tmp/tmp.KlQTQB3vDe ++ rm /tmp/tmp.DEPzfDIN7J /tmp/tmp.KlQTQB3vDe ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.1x0TlFnd4C ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.jZgT4zCg1X +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc 
some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.1x0TlFnd4C +++++ cat /tmp/tmp.jZgT4zCg1X +++++ rm /tmp/tmp.1x0TlFnd4C /tmp/tmp.jZgT4zCg1X +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.BwOlExOf7H ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.qJvzzsXxZP +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.BwOlExOf7H +++++ cat /tmp/tmp.qJvzzsXxZP +++++ rm /tmp/tmp.BwOlExOf7H /tmp/tmp.qJvzzsXxZP +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zH0TgNtPVj +++ mktemp ++ local LAST_ERR=/tmp/tmp.vz4dXBjxgW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zH0TgNtPVj ++ cat /tmp/tmp.vz4dXBjxgW ++ rm /tmp/tmp.zH0TgNtPVj /tmp/tmp.vz4dXBjxgW ++ return 0 + [[ 2 == \2 ]] + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9jFAPxwm79 +++ mktemp ++ local LAST_ERR=/tmp/tmp.6I6xCI4q83 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9jFAPxwm79 ++ cat /tmp/tmp.6I6xCI4q83 ++ rm /tmp/tmp.9jFAPxwm79 /tmp/tmp.6I6xCI4q83 ++ return 0 + client_pod=pxc-client-7b7f8bcff9-4b69t + wait_pod pxc-client-7b7f8bcff9-4b69t + local pod=pxc-client-7b7f8bcff9-4b69t + local max_retry=480 + local ns= ++ echo pxc-client-7b7f8bcff9-4b69t ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-7b7f8bcff9-4b69t condition met pxc-client-7b7f8bcff9-4b69t.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.RJRIosOSHb/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-4.sql /tmp/tmp.RJRIosOSHb/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.5dsJMdsIEm +++ mktemp ++ local LAST_ERR=/tmp/tmp.411j7AXcpU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5dsJMdsIEm ++ cat /tmp/tmp.411j7AXcpU ++ rm /tmp/tmp.5dsJMdsIEm /tmp/tmp.411j7AXcpU ++ return 0 + newpass='8HOKQj1Cf@>T[kTk' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''8HOKQj1Cf@>T[kTk'\'';' '-h some-name-pxc -uroot -p'\''8HOKQj1Cf@>T[kTk'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''8HOKQj1Cf@>T[kTk'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''8HOKQj1Cf@>T[kTk'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fawVv7mbx3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.HqbjYARMMZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fawVv7mbx3 ++ cat /tmp/tmp.HqbjYARMMZ ++ rm /tmp/tmp.fawVv7mbx3 /tmp/tmp.HqbjYARMMZ ++ return 0 + client_pod=pxc-client-7b7f8bcff9-4b69t + wait_pod pxc-client-7b7f8bcff9-4b69t + local pod=pxc-client-7b7f8bcff9-4b69t + local max_retry=480 + local ns= ++ echo pxc-client-7b7f8bcff9-4b69t ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-7b7f8bcff9-4b69t condition met pxc-client-7b7f8bcff9-4b69t.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''8HOKQj1Cf@>T[kTk'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''8HOKQj1Cf@>T[kTk'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''8HOKQj1Cf@>T[kTk'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''8HOKQj1Cf@>T[kTk'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.g4p3JDOsz9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.gWJ2gPBn6r ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.g4p3JDOsz9 ++ cat /tmp/tmp.gWJ2gPBn6r ++ rm /tmp/tmp.g4p3JDOsz9 /tmp/tmp.gWJ2gPBn6r ++ return 0 + client_pod=pxc-client-7b7f8bcff9-4b69t + wait_pod pxc-client-7b7f8bcff9-4b69t + local 
pod=pxc-client-7b7f8bcff9-4b69t + local max_retry=480 + local ns= ++ echo pxc-client-7b7f8bcff9-4b69t ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-7b7f8bcff9-4b69t condition met pxc-client-7b7f8bcff9-4b69t.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.RJRIosOSHb/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-4.sql /tmp/tmp.RJRIosOSHb/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.pLapUxCQWC +++ mktemp ++ local LAST_ERR=/tmp/tmp.n7ChT3f6tC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pLapUxCQWC ++ cat /tmp/tmp.n7ChT3f6tC ++ rm /tmp/tmp.pLapUxCQWC /tmp/tmp.n7ChT3f6tC ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.nsm5tbSmye ++ mktemp + local LAST_ERR=/tmp/tmp.V6PWNwh4NT + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.nsm5tbSmye secret/my-cluster-secrets-2 configured + cat /tmp/tmp.V6PWNwh4NT Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
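
The kubectl warning captured just above is benign: my-cluster-secrets-2 evidently existed before this apply without having been managed by kubectl apply (the test patched it directly earlier in the run), so it lacks the kubectl.kubernetes.io/last-applied-configuration annotation, and kubectl patches the annotation in automatically on this first apply. As the warning's own text suggests, creating the resource with --save-config, or managing it with apply from the start, keeps later applies quiet; a minimal sketch using the same manifest the test applies:

# either form records the last-applied-configuration annotation,
# so subsequent kubectl apply calls do not emit the warning
kubectl create -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/conf/secrets.yml --save-config
# or, for resources managed declaratively from the start:
kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/conf/secrets.yml
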
+ rm /tmp/tmp.nsm5tbSmye /tmp/tmp.V6PWNwh4NT + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dhVwv6KA6f +++ mktemp ++ local LAST_ERR=/tmp/tmp.dPVrgOSzzQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dhVwv6KA6f ++ cat /tmp/tmp.dPVrgOSzzQ ++ rm /tmp/tmp.dhVwv6KA6f /tmp/tmp.dPVrgOSzzQ ++ return 0 + client_pod=pxc-client-7b7f8bcff9-4b69t + wait_pod pxc-client-7b7f8bcff9-4b69t + local pod=pxc-client-7b7f8bcff9-4b69t + local max_retry=480 + local ns= ++ echo pxc-client-7b7f8bcff9-4b69t ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-7b7f8bcff9-4b69t condition met pxc-client-7b7f8bcff9-4b69t.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.RJRIosOSHb/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-4.sql /tmp/tmp.RJRIosOSHb/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_OUT=/tmp/tmp.qDjuussPth + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ mktemp + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1785-7e3ddd30#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + local LAST_ERR=/tmp/tmp.NJFCw1pRmJ + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-10631~ + local exit_status=0 + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qDjuussPth 
perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.NJFCw1pRmJ + rm /tmp/tmp.qDjuussPth /tmp/tmp.NJFCw1pRmJ + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2mM1DHBcJo +++ mktemp ++ local LAST_ERR=/tmp/tmp.eWSb0bB3AE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2mM1DHBcJo ++ cat /tmp/tmp.eWSb0bB3AE ++ rm /tmp/tmp.2mM1DHBcJo /tmp/tmp.eWSb0bB3AE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BsDjTIesMd +++ mktemp ++ local LAST_ERR=/tmp/tmp.XZQPOXo0Wt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BsDjTIesMd ++ cat /tmp/tmp.XZQPOXo0Wt ++ rm /tmp/tmp.BsDjTIesMd /tmp/tmp.XZQPOXo0Wt ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zEp0zkxkug +++ mktemp ++ local LAST_ERR=/tmp/tmp.aysc56LPbu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zEp0zkxkug ++ cat /tmp/tmp.aysc56LPbu ++ rm /tmp/tmp.zEp0zkxkug /tmp/tmp.aysc56LPbu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EGcbrbDZhe +++ mktemp ++ local LAST_ERR=/tmp/tmp.I0iTrp06BX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EGcbrbDZhe ++ cat /tmp/tmp.I0iTrp06BX ++ rm /tmp/tmp.EGcbrbDZhe /tmp/tmp.I0iTrp06BX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zihUKcmRyl +++ mktemp ++ local LAST_ERR=/tmp/tmp.AoqI0iQUku ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zihUKcmRyl ++ cat /tmp/tmp.AoqI0iQUku ++ rm /tmp/tmp.zihUKcmRyl /tmp/tmp.AoqI0iQUku ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 
]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aS18u9Wbpz +++ mktemp ++ local LAST_ERR=/tmp/tmp.OPep96ifXR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aS18u9Wbpz ++ cat /tmp/tmp.OPep96ifXR ++ rm /tmp/tmp.aS18u9Wbpz /tmp/tmp.OPep96ifXR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LAjZhaSj1H +++ mktemp ++ local LAST_ERR=/tmp/tmp.d7Rx8yrQHW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LAjZhaSj1H ++ cat /tmp/tmp.d7Rx8yrQHW ++ rm /tmp/tmp.LAjZhaSj1H /tmp/tmp.d7Rx8yrQHW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w4fMwX5TUn +++ mktemp ++ local LAST_ERR=/tmp/tmp.r2fw2ivjFX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.w4fMwX5TUn ++ cat /tmp/tmp.r2fw2ivjFX ++ rm /tmp/tmp.w4fMwX5TUn /tmp/tmp.r2fw2ivjFX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ikGIouxV5f +++ mktemp ++ local LAST_ERR=/tmp/tmp.3qHbDt3bLk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ikGIouxV5f ++ cat /tmp/tmp.3qHbDt3bLk ++ rm /tmp/tmp.ikGIouxV5f /tmp/tmp.3qHbDt3bLk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 8 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yR387FWusf +++ mktemp ++ local LAST_ERR=/tmp/tmp.gZj5Gak0W9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yR387FWusf ++ cat /tmp/tmp.gZj5Gak0W9 ++ rm /tmp/tmp.yR387FWusf /tmp/tmp.gZj5Gak0W9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 9 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X49Pbv67df +++ mktemp ++ local LAST_ERR=/tmp/tmp.iVSGmNSth1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.X49Pbv67df ++ cat /tmp/tmp.iVSGmNSth1 ++ rm /tmp/tmp.X49Pbv67df /tmp/tmp.iVSGmNSth1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' 
waiting for cluster readyness + sleep 20 + [[ 10 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QCB4yY6rM3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.eGS2sHG5xd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QCB4yY6rM3 ++ cat /tmp/tmp.eGS2sHG5xd ++ rm /tmp/tmp.QCB4yY6rM3 /tmp/tmp.eGS2sHG5xd ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 11 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cAkuD3UBTN +++ mktemp ++ local LAST_ERR=/tmp/tmp.8zO1SVVUt2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cAkuD3UBTN ++ cat /tmp/tmp.8zO1SVVUt2 ++ rm /tmp/tmp.cAkuD3UBTN /tmp/tmp.8zO1SVVUt2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 12 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UuLk3IlHHN +++ mktemp ++ local LAST_ERR=/tmp/tmp.GN3ngvj9S4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UuLk3IlHHN ++ cat /tmp/tmp.GN3ngvj9S4 ++ rm /tmp/tmp.UuLk3IlHHN /tmp/tmp.GN3ngvj9S4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 13 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rqyLUotaXz +++ mktemp ++ local LAST_ERR=/tmp/tmp.YCCW9o5M1P ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rqyLUotaXz ++ cat /tmp/tmp.YCCW9o5M1P ++ rm /tmp/tmp.rqyLUotaXz /tmp/tmp.YCCW9o5M1P ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CvHmnRhemW +++ mktemp ++ local LAST_ERR=/tmp/tmp.W01zvie4WY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CvHmnRhemW ++ cat /tmp/tmp.W01zvie4WY ++ rm /tmp/tmp.CvHmnRhemW /tmp/tmp.W01zvie4WY ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.CYrX6aJAle ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.RV4GbU0aT1 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.CYrX6aJAle +++++ cat /tmp/tmp.RV4GbU0aT1 +++++ rm /tmp/tmp.CYrX6aJAle /tmp/tmp.RV4GbU0aT1 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy 
++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kjAzzHukf8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.C0YNlA5O8x ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kjAzzHukf8 ++ cat /tmp/tmp.C0YNlA5O8x ++ rm /tmp/tmp.kjAzzHukf8 /tmp/tmp.C0YNlA5O8x ++ return 0 + [[ 3 == \3 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + check_generation 2 haproxy some-name + local generation=2 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LXSssJJHGI +++ mktemp ++ local LAST_ERR=/tmp/tmp.5ywWJylR66 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LXSssJJHGI ++ cat /tmp/tmp.5ywWJylR66 ++ rm /tmp/tmp.LXSssJJHGI /tmp/tmp.5ywWJylR66 ++ return 0 + current_generation=2 + [[ 2 != \2 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.V0FbMD4ETA ++ mktemp + local LAST_ERR=/tmp/tmp.06vrYYFpVM + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.V0FbMD4ETA secret/my-cluster-secrets patched + cat /tmp/tmp.06vrYYFpVM + rm /tmp/tmp.V0FbMD4ETA /tmp/tmp.06vrYYFpVM + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1wJZDH7WeL +++ mktemp ++ local LAST_ERR=/tmp/tmp.Fda4zESbz8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1wJZDH7WeL ++ cat /tmp/tmp.Fda4zESbz8 ++ rm /tmp/tmp.1wJZDH7WeL /tmp/tmp.Fda4zESbz8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8t2rshCo7r +++ mktemp ++ local LAST_ERR=/tmp/tmp.7eNztbvu9o ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8t2rshCo7r ++ cat /tmp/tmp.7eNztbvu9o ++ rm /tmp/tmp.8t2rshCo7r /tmp/tmp.7eNztbvu9o ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster 
readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VXnKQE2npV +++ mktemp ++ local LAST_ERR=/tmp/tmp.AWzBmqYexd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VXnKQE2npV ++ cat /tmp/tmp.AWzBmqYexd ++ rm /tmp/tmp.VXnKQE2npV /tmp/tmp.AWzBmqYexd ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vmPCCYvpVq +++ mktemp ++ local LAST_ERR=/tmp/tmp.rD050WOLCG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vmPCCYvpVq ++ cat /tmp/tmp.rD050WOLCG ++ rm /tmp/tmp.vmPCCYvpVq /tmp/tmp.rD050WOLCG ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1UAk7EHVll +++ mktemp ++ local LAST_ERR=/tmp/tmp.lQ1hO8fAS0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1UAk7EHVll ++ cat /tmp/tmp.lQ1hO8fAS0 ++ rm /tmp/tmp.1UAk7EHVll /tmp/tmp.lQ1hO8fAS0 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.YNLobkk6Pw ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.eNOZgo4dbi +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.YNLobkk6Pw +++++ cat /tmp/tmp.eNOZgo4dbi +++++ rm /tmp/tmp.YNLobkk6Pw /tmp/tmp.eNOZgo4dbi +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.s6vOGJdalV +++ mktemp ++ local LAST_ERR=/tmp/tmp.RNznnksxvR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.s6vOGJdalV ++ cat /tmp/tmp.RNznnksxvR ++ rm /tmp/tmp.s6vOGJdalV /tmp/tmp.RNznnksxvR ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ 
+ compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\'''
+ local command_id=select-3
+ local 'command=SHOW DATABASES;'
+ local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\'''
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-3.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]]
+ run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\'''
+ local 'command=SHOW DATABASES;'
+ local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\'''
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.3xZ65NM5aN
+++ mktemp
++ local LAST_ERR=/tmp/tmp.HGpb4hVQki
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.3xZ65NM5aN
++ cat /tmp/tmp.HGpb4hVQki
++ rm /tmp/tmp.3xZ65NM5aN /tmp/tmp.HGpb4hVQki
++ return 0
+ client_pod=pxc-client-7b7f8bcff9-4b69t
+ wait_pod pxc-client-7b7f8bcff9-4b69t
+ local pod=pxc-client-7b7f8bcff9-4b69t
+ local max_retry=480
+ local ns=
++ echo pxc-client-7b7f8bcff9-4b69t
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-7b7f8bcff9-4b69t condition met
pxc-client-7b7f8bcff9-4b69t.Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.RJRIosOSHb/select-3.sql ']'
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1785/e2e-tests/users/compare/select-3.sql /tmp/tmp.RJRIosOSHb/select-3.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
+ check_generation 3 haproxy some-name
+ local generation=3
+ local container=haproxy
+ local cluster=some-name
+ local current_generation
++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.1URECSUaJn
+++ mktemp
++ local LAST_ERR=/tmp/tmp.KKU7diwhKC
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.1URECSUaJn
++ cat /tmp/tmp.KKU7diwhKC
++ rm /tmp/tmp.1URECSUaJn /tmp/tmp.KKU7diwhKC
++ return 0
+ current_generation=3
+ [[ 3 != \3 ]]
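Nearly every command above runs through the kubectl_bin wrapper, which is why each call is bracketed by mktemp, seq 0 2, and cat/rm of LAST_OUT/LAST_ERR. A condensed sketch of that retry idiom as it appears in the trace (the real helper may differ in details such as back-off between attempts):

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        # up to three attempts; stop on the first success
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            [ "$exit_status" != 0 ] || break
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2
        rm "$LAST_OUT" "$LAST_ERR"
        return $exit_status
    }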
+ destroy users-10631
+ local namespace=users-10631
+ local ignore_logs=true
+ desc 'destroy cluster/operator and all other resources'
+ set +o xtrace
-----------------------------------------------------------------------------------
destroy cluster/operator and all other resources
-----------------------------------------------------------------------------------
+ '[' true == false -o 1 == 1 ']'
+ grep -v 'the object has been modified'
++ get_operator_pod
+ grep -v level=info
++ local label_prefix=app.kubernetes.io/
+ /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g'
+ sort -u
+ grep -v 'get backup status: Job.batch'
+ tee /tmp/tmp.RJRIosOSHb/operator.log
+++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator
+++ grep -c percona-xtradb-cluster-operator
++ local check_label=1
++ [[ 1 -eq 0 ]]
++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
+++ mktemp
++ local LAST_OUT=/tmp/tmp.6JhCJJ4XSX
+++ mktemp
++ local LAST_ERR=/tmp/tmp.NFSg0ZtmwV
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.6JhCJJ4XSX
++ cat /tmp/tmp.NFSg0ZtmwV
++ rm /tmp/tmp.6JhCJJ4XSX /tmp/tmp.NFSg0ZtmwV
++ return 0
+ kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-66cc8ffb95-hj7s5
++ mktemp
+ local LAST_OUT=/tmp/tmp.EcsR23ALb8
++ mktemp
+ local LAST_ERR=/tmp/tmp.6pSrPvKoSF
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl logs -n pxc-operator percona-xtradb-cluster-operator-66cc8ffb95-hj7s5
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.EcsR23ALb8
+ cat /tmp/tmp.6pSrPvKoSF
+ rm /tmp/tmp.EcsR23ALb8 /tmp/tmp.6pSrPvKoSF
+ return 0
2024-08-09T11:36:59.249Z INFO setup Manager starting up {"gitCommit": "7e3ddd30045a1f5403cbb31a3db0e57c5edb140b", "gitBranch": "PR-1785-7e3ddd30", "buildTime": "2024-08-09T09:47:37Z", "goVersion": "go1.22.6", "os": "linux", "arch": "amd64"}
2024-08-09T11:36:59.249Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.27.16-gke.1051000"}
2024-08-09T11:36:59.250Z INFO setup Registering Components.
2024-08-09T11:37:00.601Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"}
2024-08-09T11:37:00.606Z INFO controller-runtime.metrics Starting metrics server
2024-08-09T11:37:00.606Z INFO setup Starting the Cmd.
2024-08-09T11:37:00.607Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false}
2024-08-09T11:37:00.607Z INFO controller-runtime.webhook Starting webhook server
2024-08-09T11:37:00.684Z INFO controller-runtime.certwatcher Updated current TLS certificate
2024-08-09T11:37:00.684Z INFO starting server {"name": "health probe", "addr": "[::]:8081"}
2024-08-09T11:37:00.685Z INFO controller-runtime.certwatcher Starting certificate watcher
2024-08-09T11:37:00.685Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443}
2024-08-09T11:37:00.785Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com...
2024-08-09T11:37:00.804Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com
2024-08-09T11:37:00.805Z DEBUG events percona-xtradb-cluster-operator-66cc8ffb95-hj7s5_51663e5c-5044-4e23-8ee3-a69b181b5fd8 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"135ed9fb-89a3-42a1-b330-15f9c85155b9","apiVersion":"coordination.k8s.io/v1","resourceVersion":"66006"}, "reason": "LeaderElection"}
2024-08-09T11:37:00.805Z INFO Starting Controller {"controller": "pxcbackup-controller"}
2024-08-09T11:37:00.805Z INFO Starting Controller {"controller": "pxc-controller"}
2024-08-09T11:37:00.805Z INFO Starting Controller {"controller": "pxcrestore-controller"}
2024-08-09T11:37:00.805Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"}
2024-08-09T11:37:00.805Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"}
2024-08-09T11:37:00.805Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"}
2024-08-09T11:37:01.027Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1}
2024-08-09T11:37:01.027Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1}
2024-08-09T11:37:01.027Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1}
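Everything from here to the end of the destroy step is the operator pod log, replayed through the filter pipeline traced above; reassembled as one command it is roughly (operator_pod stands for the pod name resolved by get_operator_pod):

    kubectl logs -n pxc-operator "$operator_pod" \
        | grep -v 'the object has been modified' \
        | grep -v 'get backup status: Job.batch' \
        | grep -v level=info \
        | /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \
        | sort -u \
        | tee /tmp/tmp.RJRIosOSHb/operator.log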
2024-08-09T11:37:38.307Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "0eeb2c30-bfc4-4700-be86-896f2b5382b7", "version": "1.15.0"}
2024-08-09T11:37:38.747Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "0eeb2c30-bfc4-4700-be86-896f2b5382b7", "object": "some-name-pxc"}
2024-08-09T11:37:38.798Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "0eeb2c30-bfc4-4700-be86-896f2b5382b7", "object": "some-name-pxc"}
2024-08-09T11:37:38.884Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "0eeb2c30-bfc4-4700-be86-896f2b5382b7", "object": "some-name-pxc-unready"}
2024-08-09T11:37:38.949Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "0eeb2c30-bfc4-4700-be86-896f2b5382b7", "object": "some-name-proxysql"}
2024-08-09T11:37:39.035Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "0eeb2c30-bfc4-4700-be86-896f2b5382b7", "object": "some-name-proxysql"}
2024-08-09T11:37:39.308Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "0eeb2c30-bfc4-4700-be86-896f2b5382b7", "object": "some-name-proxysql-unready"}
2024-08-09T11:37:39.631Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "b430cfec-c19c-40fa-9a9e-4d37196b063f", "object": "some-name-pxc"}
2024-08-09T11:37:39.688Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "b430cfec-c19c-40fa-9a9e-4d37196b063f", "object": "some-name-proxysql"}
2024-08-09T11:38:56.842Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "8efbf689-e6c4-48d4-af29-7ea21bfa6566", "user": "operator"}
2024-08-09T11:38:56.880Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "8efbf689-e6c4-48d4-af29-7ea21bfa6566", "user": "monitor"}
2024-08-09T11:38:56.928Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "8efbf689-e6c4-48d4-af29-7ea21bfa6566"}
2024-08-09T11:38:56.976Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "8efbf689-e6c4-48d4-af29-7ea21bfa6566", "user": "xtrabackup"}
2024-08-09T11:38:57.021Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "8efbf689-e6c4-48d4-af29-7ea21bfa6566"}
2024-08-09T11:38:57.063Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "8efbf689-e6c4-48d4-af29-7ea21bfa6566", "err": "get primary pxc pod: not found"}
2024-08-09T11:39:01.818Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "64010f14-05f2-4c8a-ae29-ca25c8dc6a6b", "err": "get primary pxc pod: not found"}
2024-08-09T11:39:06.991Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "9be1043c-735e-4420-b180-9bb36abc2485", "err": "get primary pxc pod: not found"}
2024-08-09T11:39:12.103Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "12592875-d6e9-4764-8cd3-cc9a57fad0b8", "err": "get primary pxc pod: not found"}
2024-08-09T11:41:22.656Z INFO Password expiration policy updated
{"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "8f79b31a-e7a7-4d00-bcfe-d1c2ea1ab286", "user": "root"} 2024-08-09T11:41:22.710Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "8f79b31a-e7a7-4d00-bcfe-d1c2ea1ab286", "user": "replication"} 2024-08-09T11:41:22.803Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "8f79b31a-e7a7-4d00-bcfe-d1c2ea1ab286", "new version": "5.7.44-48-57"} 2024-08-09T11:41:27.214Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "8f79b31a-e7a7-4d00-bcfe-d1c2ea1ab286"} 2024-08-09T11:41:30.925Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "8bd6f701-7508-4bd9-b949-fa27cf46022c"} 2024-08-09T11:41:36.208Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "9282a1af-aaff-4e9c-8327-013bd566c2a6"} 2024-08-09T11:41:42.230Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "dcc48c37-904a-498c-a6a6-9f5bbc7394ad"} 2024-08-09T11:41:46.712Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "a1bac869-e3a8-4420-b19d-c60282f5b442"} 2024-08-09T11:41:51.719Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "c2647743-6c81-4719-ab24-96b646f3d544"} 2024-08-09T11:41:57.011Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "561c6337-5bc6-44f0-9b0e-d03dfb3e2737"} 2024-08-09T11:42:02.124Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "215f49a6-70e3-47da-9332-5d3ad65909a9"} 2024-08-09T11:42:07.402Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "bb2e1afc-2b40-4489-ab01-7b413369b0a5"} 2024-08-09T11:42:12.661Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "7c1760bc-ce56-43a6-9775-b434e0b1ad96"} 2024-08-09T11:42:17.541Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "299846c3-309c-4065-b38a-95aaf1b5da64"} 2024-08-09T11:42:22.633Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "63d9efae-9ef3-4081-856c-32179f3b0a42"} 2024-08-09T11:42:27.823Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "717b83ed-2119-4f88-9863-bfd613773564"} 2024-08-09T11:42:33.035Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "96ee5feb-b1f4-430c-9909-11d4aaf36346"} 2024-08-09T11:42:34.770Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", 
"reconcileID": "f8c1674b-588b-424a-8c25-9dd396303408", "user": "root"} 2024-08-09T11:42:34.814Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "f8c1674b-588b-424a-8c25-9dd396303408", "user": "root"} 2024-08-09T11:42:34.822Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "f8c1674b-588b-424a-8c25-9dd396303408", "secret": "some-name-mysql-init", "user": "root"} 2024-08-09T11:42:39.590Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "f8c1674b-588b-424a-8c25-9dd396303408"} 2024-08-09T11:42:39.601Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "f8c1674b-588b-424a-8c25-9dd396303408", "user": "root"} 2024-08-09T11:42:43.039Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "f8c1674b-588b-424a-8c25-9dd396303408"} 2024-08-09T11:42:48.804Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "057ed8a8-ffb8-4dc1-bd6b-e583fbf63894"} 2024-08-09T11:42:53.936Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "e07052d0-a8ce-4aa5-8114-e17bad88a6d9"} 2024-08-09T11:42:54.942Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "aa7f57cd-80a4-4753-beb2-f70c13b6581e", "object": "some-name-proxysql"} 2024-08-09T11:42:58.413Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "aa7f57cd-80a4-4753-beb2-f70c13b6581e", "error": "exec syncusers: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: pod some-name-proxysql-2 does not have a host assigned / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1233\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-08-09T11:43:20.789Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "107f6533-59f4-44f0-b8ab-2160b7b58ef2", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in 
ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1233\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-08-09T11:43:21.604Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "61b80a76-84cc-4e5e-be33-0541e5d44ed2", "user": "proxyadmin"} 2024-08-09T11:43:21.604Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "61b80a76-84cc-4e5e-be33-0541e5d44ed2", "user": "proxyadmin"} 2024-08-09T11:43:21.677Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "61b80a76-84cc-4e5e-be33-0541e5d44ed2", "user": "proxyadmin"} 2024-08-09T11:43:21.688Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "61b80a76-84cc-4e5e-be33-0541e5d44ed2", "user": "proxyadmin"} 2024-08-09T11:43:21.689Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "61b80a76-84cc-4e5e-be33-0541e5d44ed2", "last-applied-secret": "1a55867d0f4fa0d0643653b2eb57b2f690315d8d843a10599c9839c2a779a95f"} 2024-08-09T11:43:21.713Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "61b80a76-84cc-4e5e-be33-0541e5d44ed2", "object": "some-name-proxysql"} 2024-08-09T11:43:21.871Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "61b80a76-84cc-4e5e-be33-0541e5d44ed2", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1233\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-08-09T11:44:09.984Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "c30d546f-0395-4b5b-9955-dcefbc37aa69"} 2024-08-09T11:44:19.887Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "586e7394-9926-4c35-aa47-654b8206bd27"} 2024-08-09T11:44:30.667Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "107d3d82-5063-489c-b6bd-67ba6292a8cf"} 2024-08-09T11:44:33.116Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "b1862e0f-7893-4870-b702-6180c78dda5f", "object": "some-name-proxysql"} 2024-08-09T11:44:36.057Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "6d34783f-dea0-446f-990c-0fea19306077", "user": "xtrabackup"} 2024-08-09T11:44:36.085Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "6d34783f-dea0-446f-990c-0fea19306077", "user": "xtrabackup"} 2024-08-09T11:44:36.104Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "6d34783f-dea0-446f-990c-0fea19306077", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-08-09T11:44:36.119Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "6d34783f-dea0-446f-990c-0fea19306077", "user": "xtrabackup"} 2024-08-09T11:44:36.119Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "6d34783f-dea0-446f-990c-0fea19306077", "last-applied-secret": "d11bcc70ca53ea410cd455e13d82e7225917b0861d9b677ed34c02c5f6fbaf1b"} 2024-08-09T11:44:36.145Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "6d34783f-dea0-446f-990c-0fea19306077", "object": "some-name-pxc"} 2024-08-09T11:44:40.631Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "6d34783f-dea0-446f-990c-0fea19306077"} 2024-08-09T11:47:15.224Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "63291c55-5d9e-4761-ad5e-084f34f35290"} 2024-08-09T11:47:20.235Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "3dbdac83-5e5e-473b-a1c6-44486742ecfb"} 2024-08-09T11:47:25.079Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": 
"users-10631", "name": "some-name", "reconcileID": "3dab53d2-35f9-4c40-abe5-de47411eb29f"} 2024-08-09T11:47:26.861Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "14be455a-d789-487a-a1d1-507f3250bc47", "user": "monitor"} 2024-08-09T11:47:26.888Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "14be455a-d789-487a-a1d1-507f3250bc47", "user": "monitor"} 2024-08-09T11:47:26.900Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "14be455a-d789-487a-a1d1-507f3250bc47", "secret": "some-name-mysql-init", "user": "monitor"} 2024-08-09T11:47:26.947Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "14be455a-d789-487a-a1d1-507f3250bc47", "user": "monitor"} 2024-08-09T11:47:26.959Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "14be455a-d789-487a-a1d1-507f3250bc47", "user": "monitor"} 2024-08-09T11:47:26.959Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "14be455a-d789-487a-a1d1-507f3250bc47", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-08-09T11:47:26.990Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "14be455a-d789-487a-a1d1-507f3250bc47", "object": "some-name-proxysql"} 2024-08-09T11:47:30.106Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "14be455a-d789-487a-a1d1-507f3250bc47", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1233\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-08-09T11:48:04.448Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "524df3a9-9dcd-4f8f-8ae8-b0719812d774"} 2024-08-09T11:48:09.194Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "ccc90992-e863-48f5-b83a-b9c96a138732"} 2024-08-09T11:48:14.441Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "02ccf415-db8a-4fe8-a21e-dcaac26505b8"} 2024-08-09T11:48:19.633Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "e1848a1e-16c7-4650-87f9-83e983143416"} 2024-08-09T11:48:25.036Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": 
"04863994-31f1-40a9-88ab-2569bcaaf55a"} 2024-08-09T11:48:29.824Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "1b40d616-7b7b-40f2-89cc-551a42e84bc5"} 2024-08-09T11:48:34.991Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "89847d99-ff1a-416f-92ea-752fb4a51ca7"} 2024-08-09T11:48:36.729Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "c8ec384b-be4c-4d9c-bb99-ffa4b89155c3", "user": "operator"} 2024-08-09T11:48:36.758Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "c8ec384b-be4c-4d9c-bb99-ffa4b89155c3", "user": "operator"} 2024-08-09T11:48:36.770Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "c8ec384b-be4c-4d9c-bb99-ffa4b89155c3", "secret": "some-name-mysql-init", "user": "operator"} 2024-08-09T11:48:36.789Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "c8ec384b-be4c-4d9c-bb99-ffa4b89155c3", "user": "operator"} 2024-08-09T11:48:36.789Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "c8ec384b-be4c-4d9c-bb99-ffa4b89155c3", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-08-09T11:48:36.826Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "c8ec384b-be4c-4d9c-bb99-ffa4b89155c3", "object": "some-name-proxysql"} 2024-08-09T11:48:38.097Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "c8ec384b-be4c-4d9c-bb99-ffa4b89155c3", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-10631.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-10631.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-10631.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-10631.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-10631.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-10631.svc.' 
(using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1233\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-08-09T11:49:13.656Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "8ce8689e-5bf7-4178-a2aa-4efbe386ad56"} 2024-08-09T11:49:20.836Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "0194433e-221e-4e87-8cb5-29a93c707cfc"} 2024-08-09T11:49:26.193Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "b5d3f26f-34f6-4749-a438-ea9f45b1666a"} 2024-08-09T11:49:31.124Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "b99b2016-3d6d-4a76-9ea8-7ed7b33325be"} 2024-08-09T11:49:34.413Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "secrets": "my-cluster-secrets-2"} 2024-08-09T11:49:34.414Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "user": "root"} 2024-08-09T11:49:34.452Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "user": "root"} 2024-08-09T11:49:34.463Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "secret": "some-name-mysql-init", "user": "root"} 2024-08-09T11:49:37.892Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "04d26b1d-bf15-43fe-b115-e4bb7aca7b12"} 2024-08-09T11:49:39.703Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c"} 2024-08-09T11:49:39.740Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "user": "root"} 2024-08-09T11:49:39.740Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "user": "operator"} 2024-08-09T11:49:39.768Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "user": "operator"} 2024-08-09T11:49:39.832Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "secret": "some-name-mysql-init", 
"user": "operator"} 2024-08-09T11:49:39.913Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "user": "operator"} 2024-08-09T11:49:39.913Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "user": "monitor"} 2024-08-09T11:49:39.941Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "user": "monitor"} 2024-08-09T11:49:39.979Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "secret": "some-name-mysql-init", "user": "monitor"} 2024-08-09T11:49:40.027Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "user": "monitor"} 2024-08-09T11:49:40.159Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "user": "monitor"} 2024-08-09T11:49:40.159Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "user": "xtrabackup"} 2024-08-09T11:49:40.183Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "user": "xtrabackup"} 2024-08-09T11:49:40.288Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-08-09T11:49:40.326Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "user": "xtrabackup"} 2024-08-09T11:49:40.326Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "user": "replication"} 2024-08-09T11:49:40.350Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "user": "replication"} 2024-08-09T11:49:40.412Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "secret": "some-name-mysql-init", "user": "replication"} 2024-08-09T11:49:40.463Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "user": "replication"} 2024-08-09T11:49:40.463Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "user": "proxyadmin"} 2024-08-09T11:49:40.511Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "user": "proxyadmin"} 
2024-08-09T11:49:40.538Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "user": "proxyadmin"} 2024-08-09T11:49:40.539Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "last-applied-secret": "6458dcc8483ef3e45fae689e2fa32c920a36858dd62eba3a4e28568f68378883"} 2024-08-09T11:49:40.539Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "last-applied-secret": "6458dcc8483ef3e45fae689e2fa32c920a36858dd62eba3a4e28568f68378883"} 2024-08-09T11:49:40.645Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "object": "some-name-pxc"} 2024-08-09T11:49:40.688Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "object": "some-name-proxysql"} 2024-08-09T11:49:40.947Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "407cba2a-1e6a-4eb8-9763-f1af188f5a5c", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1233\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-08-09T11:51:24.283Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "f1b37c9f-e9e5-4fa9-8393-55ae198e0808", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-10631 on 10.115.192.10:53: no such host"} 2024-08-09T11:51:29.426Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "b5855416-76ee-4a6b-a361-3c474651e236", "primary name": "some-name-pxc-0.some-name-pxc.users-10631.svc.cluster.local"} 2024-08-09T11:51:34.567Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "f44c32f8-4eaf-4851-b836-2ba2848c1031", "primary name": "some-name-pxc-0.some-name-pxc.users-10631.svc.cluster.local"} 2024-08-09T11:51:39.665Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "9077e2ac-c353-4b4c-b476-c3f5ad330e60", "primary name": "some-name-pxc-0.some-name-pxc.users-10631.svc.cluster.local"} 2024-08-09T11:51:44.874Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "7032547e-307b-4f5f-8420-dc74537f0ca7", "primary name": "some-name-pxc-0.some-name-pxc.users-10631.svc.cluster.local"} 2024-08-09T11:51:49.981Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "254717cf-7150-4275-84af-8242f28ef2d6", "primary name": "some-name-pxc-0.some-name-pxc.users-10631.svc.cluster.local"} 2024-08-09T11:51:55.173Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "cb0c1ff7-80bb-4333-a555-72778f16ae77", "primary name": "some-name-pxc-0.some-name-pxc.users-10631.svc.cluster.local"} 2024-08-09T11:52:00.390Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "04c38c6f-791b-439e-a765-242bc9de4027", "primary name": "some-name-pxc-0.some-name-pxc.users-10631.svc.cluster.local"} 2024-08-09T11:52:09.198Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "1d669432-e325-49a1-b492-7fab17ab4d10"} 2024-08-09T11:52:14.238Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "c4a4c95e-2121-4765-bb8a-183cb0579a19"} 2024-08-09T11:52:19.516Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "cf7a5004-cc34-42b0-8463-f7b7c1bbf2f8"} 2024-08-09T11:52:21.232Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "bff683d6-257f-44bf-bcd2-03adc0ba194c", "user": "operator"} 2024-08-09T11:52:21.261Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "bff683d6-257f-44bf-bcd2-03adc0ba194c", "user": "operator"} 2024-08-09T11:52:21.271Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "bff683d6-257f-44bf-bcd2-03adc0ba194c", "secret": "some-name-mysql-init", "user": "operator"} 2024-08-09T11:52:21.281Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "bff683d6-257f-44bf-bcd2-03adc0ba194c", "user": "operator"} 2024-08-09T11:52:21.281Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "bff683d6-257f-44bf-bcd2-03adc0ba194c", 
"last-applied-secret": "d01f4f3197dbf38864fbfd53438d59a2b13adfea39ec8e5a453eddccfb5015ca"} 2024-08-09T11:52:21.307Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "bff683d6-257f-44bf-bcd2-03adc0ba194c", "object": "some-name-proxysql"} 2024-08-09T11:52:22.644Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "bff683d6-257f-44bf-bcd2-03adc0ba194c", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-10631.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-10631.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-10631.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-10631.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-10631.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-10631.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1233\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-08-09T11:52:48.577Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "1b57f358-97fc-4f3c-979d-713625103bf2"} 2024-08-09T11:52:56.638Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "257505ad-0db3-465c-bff9-e1ad7e5a4d08"} 2024-08-09T11:53:01.696Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "0e2246c9-2dc8-4bda-8515-4886fc4adf65"} 2024-08-09T11:53:06.941Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "04e7d747-006b-4c08-89f3-98116a967ceb"} 2024-08-09T11:53:12.349Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "c4de4f21-5561-4b25-bae9-4e3c5b00a4ad"} 2024-08-09T11:53:18.229Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "86577538-8390-4dba-9631-d2be466173f3"} 2024-08-09T11:53:23.602Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "9af4cd27-f493-4f88-a1bf-de454c97980d"} 2024-08-09T11:53:28.102Z 
DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "b105446c-d778-4468-99dd-df82c6c643a4"} 2024-08-09T11:53:33.124Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "e0c2d2cb-93f7-4a38-8273-5f1417445a3d"} 2024-08-09T11:53:38.231Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "fc183aff-2aa2-4fe2-84b3-002c4dc7ad75"} 2024-08-09T11:53:44.098Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "081c1174-452c-4039-be6b-2657a3514bf2"} 2024-08-09T11:53:49.315Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "5f52034e-26cc-4428-83fb-09401e46a966"} 2024-08-09T11:53:54.615Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "638c0440-adb6-4228-bb02-99699f34b268"} 2024-08-09T11:53:59.997Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "1cabc240-15f0-41bf-acd8-190875023c9e"} 2024-08-09T11:54:01.627Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "d15c91da-e65b-4301-91f6-10a217b9ac49", "user": "root"} 2024-08-09T11:54:01.667Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "d15c91da-e65b-4301-91f6-10a217b9ac49", "user": "root"} 2024-08-09T11:54:01.679Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "d15c91da-e65b-4301-91f6-10a217b9ac49", "secret": "some-name-mysql-init", "user": "root"} 2024-08-09T11:54:07.121Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "d15c91da-e65b-4301-91f6-10a217b9ac49"} 2024-08-09T11:54:07.131Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "d15c91da-e65b-4301-91f6-10a217b9ac49", "user": "root"} 2024-08-09T11:54:07.132Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "d15c91da-e65b-4301-91f6-10a217b9ac49", "user": "monitor"} 2024-08-09T11:54:07.158Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "d15c91da-e65b-4301-91f6-10a217b9ac49", "user": "monitor"} 2024-08-09T11:54:07.168Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "d15c91da-e65b-4301-91f6-10a217b9ac49", "secret": "some-name-mysql-init", "user": "monitor"} 2024-08-09T11:54:07.212Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "d15c91da-e65b-4301-91f6-10a217b9ac49", "user": "monitor"} 2024-08-09T11:54:07.223Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "d15c91da-e65b-4301-91f6-10a217b9ac49", "user": "monitor"} 2024-08-09T11:54:07.223Z 
INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "d15c91da-e65b-4301-91f6-10a217b9ac49", "user": "xtrabackup"} 2024-08-09T11:54:07.246Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "d15c91da-e65b-4301-91f6-10a217b9ac49", "user": "xtrabackup"} 2024-08-09T11:54:07.255Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "d15c91da-e65b-4301-91f6-10a217b9ac49", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-08-09T11:54:07.263Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "d15c91da-e65b-4301-91f6-10a217b9ac49", "user": "xtrabackup"} 2024-08-09T11:54:07.263Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "d15c91da-e65b-4301-91f6-10a217b9ac49", "user": "proxyadmin"} 2024-08-09T11:54:07.310Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "d15c91da-e65b-4301-91f6-10a217b9ac49", "user": "proxyadmin"} 2024-08-09T11:54:07.322Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "d15c91da-e65b-4301-91f6-10a217b9ac49", "user": "proxyadmin"} 2024-08-09T11:54:07.322Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "d15c91da-e65b-4301-91f6-10a217b9ac49", "last-applied-secret": "267096652b66d5d7951e4de38d403e65960c09dc69c41d043688923ecf802608"} 2024-08-09T11:54:07.323Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "d15c91da-e65b-4301-91f6-10a217b9ac49", "last-applied-secret": "267096652b66d5d7951e4de38d403e65960c09dc69c41d043688923ecf802608"} 2024-08-09T11:54:07.351Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "d15c91da-e65b-4301-91f6-10a217b9ac49", "object": "some-name-pxc"} 2024-08-09T11:54:07.382Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "d15c91da-e65b-4301-91f6-10a217b9ac49", "object": "some-name-proxysql"} 2024-08-09T11:54:07.501Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "d15c91da-e65b-4301-91f6-10a217b9ac49", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1233\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-08-09T11:54:23.728Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "a6b0b68a-6713-417d-bd14-973886a4231b", "object": "some-name-pxc"} 2024-08-09T11:54:23.794Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "a6b0b68a-6713-417d-bd14-973886a4231b", "object": "some-name-haproxy"} 2024-08-09T11:54:23.814Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "a6b0b68a-6713-417d-bd14-973886a4231b", "object": "some-name-haproxy"} 2024-08-09T11:54:23.899Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "a6b0b68a-6713-417d-bd14-973886a4231b", "object": "some-name-haproxy"} 2024-08-09T11:54:23.959Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "a6b0b68a-6713-417d-bd14-973886a4231b", "object": "some-name-haproxy-replicas"} 2024-08-09T11:54:24.096Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: 74438e8a-1ffb-4dc7-9729-3f9627b83c9e 2024-08-09T11:56:22.180Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "b27b2fd6-a315-46a5-8b49-5a781c302214", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-10631 on 10.115.192.10:53: no such host"} 2024-08-09T11:56:27.124Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "7c7ce0f8-4e22-4d29-9372-735a599b4552", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-10631 on 10.115.192.10:53: no such host"} 2024-08-09T11:57:08.286Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "737e8fcf-208d-4a8e-90d9-34b87454aec7", "user": "root"} 2024-08-09T11:57:08.320Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "737e8fcf-208d-4a8e-90d9-34b87454aec7", "user": "root"} 2024-08-09T11:57:08.328Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "737e8fcf-208d-4a8e-90d9-34b87454aec7", "secret": "some-name-mysql-init", "user": "root"} 2024-08-09T11:57:08.340Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "737e8fcf-208d-4a8e-90d9-34b87454aec7", "user": 
"root"} 2024-08-09T11:57:08.340Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "737e8fcf-208d-4a8e-90d9-34b87454aec7", "user": "operator"} 2024-08-09T11:57:08.364Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "737e8fcf-208d-4a8e-90d9-34b87454aec7", "user": "operator"} 2024-08-09T11:57:08.372Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "737e8fcf-208d-4a8e-90d9-34b87454aec7", "secret": "some-name-mysql-init", "user": "operator"} 2024-08-09T11:57:08.380Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "737e8fcf-208d-4a8e-90d9-34b87454aec7", "user": "operator"} 2024-08-09T11:57:08.380Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "737e8fcf-208d-4a8e-90d9-34b87454aec7", "user": "monitor"} 2024-08-09T11:57:08.406Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "737e8fcf-208d-4a8e-90d9-34b87454aec7", "user": "monitor"} 2024-08-09T11:57:08.414Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "737e8fcf-208d-4a8e-90d9-34b87454aec7", "secret": "some-name-mysql-init", "user": "monitor"} 2024-08-09T11:57:08.422Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "737e8fcf-208d-4a8e-90d9-34b87454aec7", "user": "monitor"} 2024-08-09T11:57:08.422Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "737e8fcf-208d-4a8e-90d9-34b87454aec7", "user": "xtrabackup"} 2024-08-09T11:57:08.444Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "737e8fcf-208d-4a8e-90d9-34b87454aec7", "user": "xtrabackup"} 2024-08-09T11:57:08.454Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "737e8fcf-208d-4a8e-90d9-34b87454aec7", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-08-09T11:57:08.462Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "737e8fcf-208d-4a8e-90d9-34b87454aec7", "user": "xtrabackup"} 2024-08-09T11:57:08.462Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "737e8fcf-208d-4a8e-90d9-34b87454aec7", "user": "replication"} 2024-08-09T11:57:08.485Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "737e8fcf-208d-4a8e-90d9-34b87454aec7", "user": "replication"} 2024-08-09T11:57:08.495Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "737e8fcf-208d-4a8e-90d9-34b87454aec7", "secret": "some-name-mysql-init", "user": "replication"} 2024-08-09T11:57:08.505Z INFO HAProxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "737e8fcf-208d-4a8e-90d9-34b87454aec7", 
"last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-08-09T11:57:08.505Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "737e8fcf-208d-4a8e-90d9-34b87454aec7", "user": "replication"} 2024-08-09T11:57:08.505Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "737e8fcf-208d-4a8e-90d9-34b87454aec7", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-08-09T11:57:08.527Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "737e8fcf-208d-4a8e-90d9-34b87454aec7", "object": "some-name-pxc"} 2024-08-09T11:57:08.586Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "737e8fcf-208d-4a8e-90d9-34b87454aec7", "object": "some-name-haproxy"} 2024-08-09T11:59:47.725Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "4e54a9e4-f14e-411f-b992-48e46739915f", "user": "monitor"} 2024-08-09T11:59:47.755Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "4e54a9e4-f14e-411f-b992-48e46739915f", "user": "monitor"} 2024-08-09T11:59:47.767Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "4e54a9e4-f14e-411f-b992-48e46739915f", "secret": "some-name-mysql-init", "user": "monitor"} 2024-08-09T11:59:47.775Z INFO HAProxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "4e54a9e4-f14e-411f-b992-48e46739915f", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"} 2024-08-09T11:59:47.775Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "4e54a9e4-f14e-411f-b992-48e46739915f", "user": "monitor"} 2024-08-09T11:59:47.807Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-10631", "name": "some-name", "reconcileID": "4e54a9e4-f14e-411f-b992-48e46739915f", "object": "some-name-haproxy"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1235 [mysql] 2024/08/09 11:59:01 connection.go:49: read tcp 10.42.249.99:45788->10.115.200.55:3306: i/o timeout + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-10631 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.XDfjqQXzyi ++ mktemp + local LAST_ERR=/tmp/tmp.RNj0PCr1mM + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XDfjqQXzyi perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.RNj0PCr1mM + rm /tmp/tmp.XDfjqQXzyi /tmp/tmp.RNj0PCr1mM + return 0 + kubectl_bin delete 
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.KnXvZIqOjN
++ mktemp
+ local LAST_ERR=/tmp/tmp.dV7zjAMjui
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.KnXvZIqOjN
No resources found
+ cat /tmp/tmp.dV7zjAMjui
+ rm /tmp/tmp.KnXvZIqOjN /tmp/tmp.dV7zjAMjui
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.HuMYZgHgbY
++ mktemp
+ local LAST_ERR=/tmp/tmp.GInY5vXgfk
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.HuMYZgHgbY
No resources found
+ cat /tmp/tmp.GInY5vXgfk
+ rm /tmp/tmp.HuMYZgHgbY /tmp/tmp.GInY5vXgfk
+ return 0
+ kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
++ mktemp
+ local LAST_OUT=/tmp/tmp.br7UEmaph4
++ mktemp
+ local LAST_ERR=/tmp/tmp.AqVcN2INXl
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.br7UEmaph4
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted
+ cat /tmp/tmp.AqVcN2INXl
+ rm /tmp/tmp.br7UEmaph4 /tmp/tmp.AqVcN2INXl
+ return 0
+ kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml
+ :
+ '[' '!' -z '' ']'
+ '[' -n pxc-operator ']'
+ kubectl_bin delete --grace-period=0 --force=true namespace users-10631
+ rm -rf /tmp/tmp.RJRIosOSHb
+ kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator
++ mktemp
++ mktemp
+ local LAST_OUT=/tmp/tmp.0LAjgdRZCd
+ desc 'test passed'
+ set +o xtrace
-----------------------------------------------------------------------------------
test passed
-----------------------------------------------------------------------------------
++ mktemp
+ local LAST_OUT=/tmp/tmp.MTCgcrTDAM
++ mktemp
+ local LAST_ERR=/tmp/tmp.tH4QIxXAkZ
+ local exit_status=0
++ seq 0 2
+ local LAST_ERR=/tmp/tmp.x5WiKajN73
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace users-10631
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace pxc-operator
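
The teardown that closes the run, condensed from the trace above for this run's namespaces: strip finalizers from any leftover PerconaXtraDBCluster objects so deletion cannot hang, then force-delete the test and operator namespaces. The interleaved trace (two seq 0 2 loops running at once) suggests the two namespace deletions run concurrently, which the sketch below makes explicit with background jobs and wait:

# Remove finalizers from remaining clusters, then force-delete both namespaces.
kubectl get pxc --all-namespaces -o wide | grep -v NAMESPACE \
    | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
kubectl delete --grace-period=0 --force=true namespace users-10631 &
kubectl delete --grace-period=0 --force=true namespace pxc-operator &
wait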