Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/logs/users-8-0.log WARNING: version difference between client (1.30) and server (1.27) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.27) exceeds the supported minor version skew of +/-1 + create_infra users-9717 + local ns=users-9717 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-19059 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.iNHaB8AhI7 ++ mktemp + local LAST_ERR=/tmp/tmp.mwMt6Pqifj + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.iNHaB8AhI7 perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.mwMt6Pqifj + rm /tmp/tmp.iNHaB8AhI7 /tmp/tmp.mwMt6Pqifj + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.ENuGsX24HC ++ mktemp + local LAST_ERR=/tmp/tmp.29LgvEyckf + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ENuGsX24HC No resources found + cat /tmp/tmp.29LgvEyckf + rm /tmp/tmp.ENuGsX24HC /tmp/tmp.29LgvEyckf + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.HAzgclXaGl ++ mktemp + local LAST_ERR=/tmp/tmp.hNDBaFusyj + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HAzgclXaGl No resources found + cat /tmp/tmp.hNDBaFusyj + rm /tmp/tmp.HAzgclXaGl /tmp/tmp.hNDBaFusyj + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ grep chaos-mesh ++ kubectl get MutatingWebhookConfiguration ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + 
: + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator + kubectl_bin get ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.jYJm0Qt7hf ++ mktemp + local LAST_OUT=/tmp/tmp.LcgyayauWX ++ mktemp + local LAST_ERR=/tmp/tmp.AF8HKJJ8Ap + local exit_status=0 + local LAST_ERR=/tmp/tmp.iiBxi1s6TS + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jYJm0Qt7hf + cat /tmp/tmp.AF8HKJJ8Ap + rm /tmp/tmp.jYJm0Qt7hf /tmp/tmp.AF8HKJJ8Ap + return 0 namespace "gmp-public" deleted namespace "gmp-system" deleted namespace "users-19059" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LcgyayauWX namespace "pxc-operator" deleted + cat /tmp/tmp.iiBxi1s6TS + rm /tmp/tmp.LcgyayauWX /tmp/tmp.iiBxi1s6TS + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.b7F3IGDtkK ++ mktemp + local LAST_ERR=/tmp/tmp.dkTR3jmxG5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.b7F3IGDtkK namespace/pxc-operator created + cat /tmp/tmp.dkTR3jmxG5 + rm /tmp/tmp.b7F3IGDtkK /tmp/tmp.dkTR3jmxG5 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.ATTFazylqO +++ mktemp ++ local LAST_ERR=/tmp/tmp.92VmiKNvhv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ATTFazylqO ++ cat /tmp/tmp.92VmiKNvhv ++ rm /tmp/tmp.ATTFazylqO /tmp/tmp.92VmiKNvhv ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1774-70b9684b-3-cluster1 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.aQjfH0ZPm3 ++ mktemp + local LAST_ERR=/tmp/tmp.PGbgFQzhua + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1774-70b9684b-3-cluster1 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.aQjfH0ZPm3 Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1774-70b9684b-3-cluster1" modified. 
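
# [editor's note] Every kubectl invocation in this log goes through the same
# wrapper pattern visible above: two mktemp files for stdout/stderr, up to
# three attempts via `seq 0 2`, captured output cat'ed back, temp files removed.
# A minimal sketch of that wrapper, reconstructed from the trace; the variable
# names LAST_OUT/LAST_ERR and the loop bounds match the log, while details such
# as the guarded `sleep 0` between retries are assumptions about the real
# helper in e2e-tests/functions.
kubectl_bin() {
    local LAST_OUT LAST_ERR
    LAST_OUT=$(mktemp)                       # captures stdout of each attempt
    LAST_ERR=$(mktemp)                       # captures stderr separately
    local exit_status=0
    for i in $(seq 0 2); do                  # at most three attempts
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ "$exit_status" != 0 ]; then
            sleep 0                          # the trace shows no real backoff
        else
            break
        fi
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR" >&2
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}
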
+ cat /tmp/tmp.PGbgFQzhua + rm /tmp/tmp.aQjfH0ZPm3 /tmp/tmp.PGbgFQzhua + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.K4ZAt0eOaa ++ mktemp + local LAST_ERR=/tmp/tmp.rSOXZwGEV2 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.K4ZAt0eOaa customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.rSOXZwGEV2 + rm /tmp/tmp.K4ZAt0eOaa /tmp/tmp.rSOXZwGEV2 + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.vWvKod2pa4 ++ mktemp + local LAST_ERR=/tmp/tmp.y7LaXWqXxT + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.vWvKod2pa4 clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.y7LaXWqXxT + rm /tmp/tmp.vWvKod2pa4 /tmp/tmp.y7LaXWqXxT + return 0 + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1774-70b9684b^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/deploy/cw-operator.yaml + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.SMzbaFkwJ0 ++ mktemp + local LAST_ERR=/tmp/tmp.j4IaOtjQHf + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SMzbaFkwJ0 deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.j4IaOtjQHf + rm /tmp/tmp.SMzbaFkwJ0 /tmp/tmp.j4IaOtjQHf + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.rwr2xI3AS3 ++ mktemp + local LAST_ERR=/tmp/tmp.Yks3i6SFl7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rwr2xI3AS3 pod/percona-xtradb-cluster-operator-56bc5d9fb9-cf2qp condition met + cat /tmp/tmp.Yks3i6SFl7 + rm /tmp/tmp.rwr2xI3AS3 /tmp/tmp.Yks3i6SFl7 + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.WnRtFCWWqd +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZPWUI95hxb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WnRtFCWWqd ++ cat /tmp/tmp.ZPWUI95hxb ++ rm /tmp/tmp.WnRtFCWWqd /tmp/tmp.ZPWUI95hxb ++ return 0 + wait_pod percona-xtradb-cluster-operator-56bc5d9fb9-cf2qp 480 pxc-operator + local pod=percona-xtradb-cluster-operator-56bc5d9fb9-cf2qp + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-56bc5d9fb9-cf2qp ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-56bc5d9fb9-cf2qp condition met percona-xtradb-cluster-operator-56bc5d9fb9-cf2qp.Ok + sleep 3 + create_namespace users-9717 + local namespace=users-9717 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + awk '{print$1}' + '[' -n '' ']' + xargs 
kubectl delete ns + desc 'cleaned up old namespaces users-9717' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-9717 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-9717 + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.dy3kHVrJtK ++ mktemp + local LAST_ERR=/tmp/tmp.nCbk0CQSre + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-9717 + local LAST_OUT=/tmp/tmp.xBREJAiJNK ++ mktemp + local LAST_ERR=/tmp/tmp.7LfM9spRJj + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-9717 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xBREJAiJNK + cat /tmp/tmp.7LfM9spRJj + rm /tmp/tmp.xBREJAiJNK /tmp/tmp.7LfM9spRJj + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-9717 namespace "gmp-public" deleted namespace "gmp-system" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.dy3kHVrJtK + cat /tmp/tmp.nCbk0CQSre Error from server (NotFound): namespaces "users-9717" not found + rm /tmp/tmp.dy3kHVrJtK /tmp/tmp.nCbk0CQSre + return 1 + : + wait_for_delete namespace/users-9717 + local res=namespace/users-9717 + echo -n 'namespace/users-9717 - ' namespace/users-9717 - + set +o xtrace Error from server (NotFound): namespaces "users-9717" not found + desc 'create namespace users-9717' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-9717 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-9717 ++ mktemp + local LAST_OUT=/tmp/tmp.a7T9gZaf2z ++ mktemp + local LAST_ERR=/tmp/tmp.CHuorOGVwc + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-9717 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.a7T9gZaf2z namespace/users-9717 created + cat /tmp/tmp.CHuorOGVwc + rm /tmp/tmp.a7T9gZaf2z /tmp/tmp.CHuorOGVwc + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.D4DG1TCYo0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.nn2x1ipUJi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.D4DG1TCYo0 ++ cat /tmp/tmp.nn2x1ipUJi ++ rm /tmp/tmp.D4DG1TCYo0 /tmp/tmp.nn2x1ipUJi ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1774-70b9684b-3-cluster1 --namespace=users-9717 ++ mktemp + local LAST_OUT=/tmp/tmp.O50ZGJDKuk ++ mktemp + local LAST_ERR=/tmp/tmp.H8Vxlmlpnj + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1774-70b9684b-3-cluster1 --namespace=users-9717 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.O50ZGJDKuk Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1774-70b9684b-3-cluster1" modified. 
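
# [editor's note] The block above is the create_namespace helper doing a full
# environment reset: best-effort removal of chaos-mesh leftovers (each delete
# tolerates the "no name was specified" error when grep finds nothing), removal
# of every non-system namespace from earlier runs, then recreation of the
# target namespace and a context switch into it. A sketch of that flow; the
# egrep/awk/xargs pipeline and the chaos-mesh cleanup are verbatim from the
# trace, the function bodies themselves are assumptions. Namespaces such as
# "default" survive only because the API server refuses to delete them, which
# is the Forbidden error above.
destroy_chaos_mesh() {
    local kind
    for kind in MutatingWebhookConfiguration ValidatingWebhookConfiguration \
                clusterrolebinding clusterrole; do
        timeout 30 kubectl delete "$kind" \
            $(kubectl get "$kind" | grep chaos-mesh | awk '{print $1}') || :
    done
    timeout 30 kubectl delete crd \
        $(kubectl get crd | grep chaos-mesh.org | awk '{print $1}') || :
}
create_namespace() {
    local namespace=$1
    destroy_chaos_mesh
    kubectl get ns \
        | egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' \
        | awk '{print $1}' \
        | xargs -r kubectl delete ns         # -r (GNU xargs) skips empty input
    kubectl delete namespace "$namespace" || :    # tolerate NotFound on a fresh run
    while kubectl get ns "$namespace" >/dev/null 2>&1; do sleep 1; done   # wait_for_delete, inlined
    kubectl create namespace "$namespace"
    kubectl config set-context "$(kubectl config current-context)" \
        --namespace="$namespace"
}
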
+ cat /tmp/tmp.H8Vxlmlpnj + rm /tmp/tmp.O50ZGJDKuk /tmp/tmp.H8Vxlmlpnj + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.0FMsTXPhay ++ mktemp + local LAST_ERR=/tmp/tmp.3W34bw2p3s + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0FMsTXPhay secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.3W34bw2p3s + rm /tmp/tmp.0FMsTXPhay /tmp/tmp.3W34bw2p3s + return 0 + desc 'create PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.PsXfOGz3aT ++ mktemp + local LAST_ERR=/tmp/tmp.ykdm8goAFu + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PsXfOGz3aT secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.ykdm8goAFu + rm /tmp/tmp.PsXfOGz3aT /tmp/tmp.ykdm8goAFu + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1774-70b9684b#' ++ mktemp + /usr/bin/sed -e 's#image:.*-proxysql$#image: 
perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + local LAST_OUT=/tmp/tmp.XPwfFDrCsc + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' ++ mktemp + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-9717~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_ERR=/tmp/tmp.QXL47PMOiP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XPwfFDrCsc deployment.apps/pxc-client created + cat /tmp/tmp.QXL47PMOiP + rm /tmp/tmp.XPwfFDrCsc /tmp/tmp.QXL47PMOiP + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/conf/some-name.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.LANEvc6eXo + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1774-70b9684b#' ++ mktemp + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_ERR=/tmp/tmp.6Zx4krumtZ + local exit_status=0 + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-9717~ + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ seq 0 2 + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LANEvc6eXo perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.6Zx4krumtZ + rm /tmp/tmp.LANEvc6eXo /tmp/tmp.6Zx4krumtZ + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.TtEXxMuiiA ++++ mktemp +++ local LAST_ERR=/tmp/tmp.JsHDcVfYZx +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ 
set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.TtEXxMuiiA +++ cat /tmp/tmp.JsHDcVfYZx +++ rm /tmp/tmp.TtEXxMuiiA /tmp/tmp.JsHDcVfYZx +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.vdhoEH2nKM ++++ mktemp +++ local LAST_ERR=/tmp/tmp.8xD2INvDgg +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.vdhoEH2nKM +++ cat /tmp/tmp.8xD2INvDgg +++ rm /tmp/tmp.vdhoEH2nKM /tmp/tmp.8xD2INvDgg +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-9717 ++ mktemp + local LAST_OUT=/tmp/tmp.SoYXSpqhCg ++ mktemp + local LAST_ERR=/tmp/tmp.J591GWUXMo + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-9717 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-9717 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-9717 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.SoYXSpqhCg + cat /tmp/tmp.J591GWUXMo error: no matching resources found + rm /tmp/tmp.SoYXSpqhCg /tmp/tmp.J591GWUXMo + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met some-name-pxc-0.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 
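
# [editor's note] wait_pod hides its polling behind `set +o xtrace`, so the log
# only shows the argument handling around this point and the "condition met" /
# "<pod>.Ok" lines it prints. A plausible sketch under that assumption: the
# sed/egrep container detection is verbatim from the trace, while the kubectl
# wait call and the trailing echo are inferred from the output.
wait_pod() {
    local pod=$1
    local max_retry=${2:-480}
    local ns=$3
    # map a trailing "-pxc-N" / "-proxysql-N" pod-name suffix to the container
    # of interest; operator and client pods fall through to an empty string
    local container
    container=$(echo "$pod" \
        | /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' \
        | egrep '^(pxc|proxysql)$' || true)
    set +o xtrace
    # the real helper presumably also inspects $container; this sketch only
    # reproduces the observable behaviour
    kubectl wait --for=condition=Ready ${ns:+-n $ns} "pod/$pod" \
        --timeout="${max_retry}s" \
        && echo "$pod.Ok"
}
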
+ local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.V3VVICEC2S +++ mktemp ++ local LAST_ERR=/tmp/tmp.yjZVa4dLpf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.V3VVICEC2S ++ cat /tmp/tmp.yjZVa4dLpf ++ rm /tmp/tmp.V3VVICEC2S /tmp/tmp.yjZVa4dLpf ++ return 0 + client_pod=pxc-client-65c795cbdf-g466m + wait_pod pxc-client-65c795cbdf-g466m + local pod=pxc-client-65c795cbdf-g466m + local max_retry=480 + local ns= ++ echo pxc-client-65c795cbdf-g466m ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-65c795cbdf-g466m condition met pxc-client-65c795cbdf-g466m.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.adKYZrN7Dv +++ mktemp ++ local LAST_ERR=/tmp/tmp.8AUvHtptRK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.adKYZrN7Dv ++ cat /tmp/tmp.8AUvHtptRK ++ rm /tmp/tmp.adKYZrN7Dv /tmp/tmp.8AUvHtptRK ++ return 0 + client_pod=pxc-client-65c795cbdf-g466m + wait_pod pxc-client-65c795cbdf-g466m + local pod=pxc-client-65c795cbdf-g466m + local max_retry=480 + local ns= ++ echo pxc-client-65c795cbdf-g466m ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-65c795cbdf-g466m condition met pxc-client-65c795cbdf-g466m.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h 
some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oTeQwgHsnd +++ mktemp ++ local LAST_ERR=/tmp/tmp.e44910dlat ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oTeQwgHsnd ++ cat /tmp/tmp.e44910dlat ++ rm /tmp/tmp.oTeQwgHsnd /tmp/tmp.e44910dlat ++ return 0 + client_pod=pxc-client-65c795cbdf-g466m + wait_pod pxc-client-65c795cbdf-g466m + local pod=pxc-client-65c795cbdf-g466m + local max_retry=480 + local ns= ++ echo pxc-client-65c795cbdf-g466m ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-65c795cbdf-g466m condition met pxc-client-65c795cbdf-g466m.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.jmGBhW8nFf/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-1.sql /tmp/tmp.jmGBhW8nFf/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xLHytkB1xE +++ mktemp ++ local LAST_ERR=/tmp/tmp.MH5fugvTPk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xLHytkB1xE ++ cat /tmp/tmp.MH5fugvTPk ++ rm /tmp/tmp.xLHytkB1xE /tmp/tmp.MH5fugvTPk ++ return 0 + client_pod=pxc-client-65c795cbdf-g466m + wait_pod pxc-client-65c795cbdf-g466m + local pod=pxc-client-65c795cbdf-g466m + local max_retry=480 + local ns= ++ echo pxc-client-65c795cbdf-g466m ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-65c795cbdf-g466m condition met pxc-client-65c795cbdf-g466m.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.jmGBhW8nFf/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-1.sql /tmp/tmp.jmGBhW8nFf/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.usxJfwTKon +++ mktemp ++ local LAST_ERR=/tmp/tmp.nMIKuBGVKt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.usxJfwTKon ++ cat /tmp/tmp.nMIKuBGVKt ++ rm /tmp/tmp.usxJfwTKon /tmp/tmp.nMIKuBGVKt ++ return 0 + client_pod=pxc-client-65c795cbdf-g466m + wait_pod pxc-client-65c795cbdf-g466m + local pod=pxc-client-65c795cbdf-g466m + local max_retry=480 + local ns= ++ echo pxc-client-65c795cbdf-g466m ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-65c795cbdf-g466m condition met pxc-client-65c795cbdf-g466m.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.jmGBhW8nFf/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-1.sql /tmp/tmp.jmGBhW8nFf/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aDxCPEBGtR +++ mktemp ++ local LAST_ERR=/tmp/tmp.8EPUAVFqAW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aDxCPEBGtR ++ cat /tmp/tmp.8EPUAVFqAW Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.aDxCPEBGtR /tmp/tmp.8EPUAVFqAW ++ return 0 + '[' '' ']' + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.xLTvynF8Hc ++ mktemp + local LAST_ERR=/tmp/tmp.AY7HZ1b8gm + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xLTvynF8Hc secret/my-cluster-secrets patched + cat /tmp/tmp.AY7HZ1b8gm + rm /tmp/tmp.xLTvynF8Hc /tmp/tmp.AY7HZ1b8gm + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0pX2Y9bgAH +++ mktemp ++ local LAST_ERR=/tmp/tmp.1D6A02InQG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0pX2Y9bgAH ++ cat /tmp/tmp.1D6A02InQG ++ rm /tmp/tmp.0pX2Y9bgAH /tmp/tmp.1D6A02InQG ++ return 0 + client_pod=pxc-client-65c795cbdf-g466m + wait_pod pxc-client-65c795cbdf-g466m + local pod=pxc-client-65c795cbdf-g466m + local max_retry=480 + local ns= ++ echo pxc-client-65c795cbdf-g466m ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-65c795cbdf-g466m condition met pxc-client-65c795cbdf-g466m.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.jmGBhW8nFf/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.jmGBhW8nFf/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.UfpR6mFr7H ++ mktemp + local LAST_ERR=/tmp/tmp.HMPTQf7C3S + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.UfpR6mFr7H perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.HMPTQf7C3S + rm /tmp/tmp.UfpR6mFr7H /tmp/tmp.HMPTQf7C3S + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2r2zDKXd9n +++ mktemp ++ local LAST_ERR=/tmp/tmp.kpiSgrAVLe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2r2zDKXd9n ++ cat /tmp/tmp.kpiSgrAVLe ++ rm /tmp/tmp.2r2zDKXd9n /tmp/tmp.kpiSgrAVLe ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZhufrgAk1m +++ mktemp ++ local LAST_ERR=/tmp/tmp.fGnZOYuqTn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZhufrgAk1m ++ cat /tmp/tmp.fGnZOYuqTn ++ rm /tmp/tmp.ZhufrgAk1m /tmp/tmp.fGnZOYuqTn ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Fju3f8qXJo ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.zM374Lkv51 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Fju3f8qXJo +++++ cat /tmp/tmp.zM374Lkv51 +++++ rm /tmp/tmp.Fju3f8qXJo /tmp/tmp.zM374Lkv51 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.INAuh8vmOc ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.uS8HaAONeb +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.INAuh8vmOc +++++ cat /tmp/tmp.uS8HaAONeb +++++ rm /tmp/tmp.INAuh8vmOc /tmp/tmp.uS8HaAONeb +++++ return 0 ++++ [[ true == 
\t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pkbmsUUF6S +++ mktemp ++ local LAST_ERR=/tmp/tmp.kR6PMcwfkZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pkbmsUUF6S ++ cat /tmp/tmp.kR6PMcwfkZ ++ rm /tmp/tmp.pkbmsUUF6S /tmp/tmp.kR6PMcwfkZ ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.YNSCa3729f ++ mktemp + local LAST_ERR=/tmp/tmp.50hH8nahPn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.YNSCa3729f secret/my-cluster-secrets patched + cat /tmp/tmp.50hH8nahPn + rm /tmp/tmp.YNSCa3729f /tmp/tmp.50hH8nahPn + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gYaKcrqnve +++ mktemp ++ local LAST_ERR=/tmp/tmp.tfOMBl3bvF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gYaKcrqnve ++ cat /tmp/tmp.tfOMBl3bvF ++ rm /tmp/tmp.gYaKcrqnve /tmp/tmp.tfOMBl3bvF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XSZqbpzMkI +++ mktemp ++ local LAST_ERR=/tmp/tmp.0oTEcCCSy7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XSZqbpzMkI ++ cat /tmp/tmp.0oTEcCCSy7 ++ rm /tmp/tmp.XSZqbpzMkI /tmp/tmp.0oTEcCCSy7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dd6xXCUc6l +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZXA34LWxej ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dd6xXCUc6l ++ cat /tmp/tmp.ZXA34LWxej ++ rm /tmp/tmp.dd6xXCUc6l /tmp/tmp.ZXA34LWxej ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.C1T5eKbjGC +++ mktemp ++ 
local LAST_ERR=/tmp/tmp.RonXU9scnd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.C1T5eKbjGC ++ cat /tmp/tmp.RonXU9scnd ++ rm /tmp/tmp.C1T5eKbjGC /tmp/tmp.RonXU9scnd ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.TK7WqHo4pQ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.DGgHlIakTX +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.TK7WqHo4pQ +++++ cat /tmp/tmp.DGgHlIakTX +++++ rm /tmp/tmp.TK7WqHo4pQ /tmp/tmp.DGgHlIakTX +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Qc2exb0g7h ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.H1YWklfO2C +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Qc2exb0g7h +++++ cat /tmp/tmp.H1YWklfO2C +++++ rm /tmp/tmp.Qc2exb0g7h /tmp/tmp.H1YWklfO2C +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nx0vIJYV00 +++ mktemp ++ local LAST_ERR=/tmp/tmp.wm0JpQmIEV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nx0vIJYV00 ++ cat /tmp/tmp.wm0JpQmIEV ++ rm /tmp/tmp.nx0vIJYV00 /tmp/tmp.wm0JpQmIEV ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.jmGBhW8nFf/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-2.sql /tmp/tmp.jmGBhW8nFf/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.jmGBhW8nFf/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-2.sql /tmp/tmp.jmGBhW8nFf/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.jmGBhW8nFf/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-2.sql /tmp/tmp.jmGBhW8nFf/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Y7gU5bv5BZ ++ mktemp + local LAST_ERR=/tmp/tmp.F15NprntHj + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Y7gU5bv5BZ perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.F15NprntHj + rm /tmp/tmp.Y7gU5bv5BZ /tmp/tmp.F15NprntHj + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.oumSJYOuYM ++ mktemp + local LAST_ERR=/tmp/tmp.JaGR8aTFib + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.oumSJYOuYM secret/my-cluster-secrets patched + cat /tmp/tmp.JaGR8aTFib + rm /tmp/tmp.oumSJYOuYM /tmp/tmp.JaGR8aTFib + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.36tHYPGKn7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.pETrkmwczf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.36tHYPGKn7 ++ cat /tmp/tmp.pETrkmwczf ++ rm /tmp/tmp.36tHYPGKn7 /tmp/tmp.pETrkmwczf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AIbQPQAYzO +++ mktemp ++ local LAST_ERR=/tmp/tmp.8YVuDaokdC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AIbQPQAYzO ++ cat /tmp/tmp.8YVuDaokdC ++ rm /tmp/tmp.AIbQPQAYzO /tmp/tmp.8YVuDaokdC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.r7HpQQwQOR +++ mktemp ++ local LAST_ERR=/tmp/tmp.LNdo93f0Bs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 
'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.r7HpQQwQOR ++ cat /tmp/tmp.LNdo93f0Bs ++ rm /tmp/tmp.r7HpQQwQOR /tmp/tmp.LNdo93f0Bs ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uVP0TWUaMI +++ mktemp ++ local LAST_ERR=/tmp/tmp.JvejIFo74C ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uVP0TWUaMI ++ cat /tmp/tmp.JvejIFo74C ++ rm /tmp/tmp.uVP0TWUaMI /tmp/tmp.JvejIFo74C ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DC1NDNCzGI +++ mktemp ++ local LAST_ERR=/tmp/tmp.i1ZSTaE6HT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DC1NDNCzGI ++ cat /tmp/tmp.i1ZSTaE6HT ++ rm /tmp/tmp.DC1NDNCzGI /tmp/tmp.i1ZSTaE6HT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xulpqrJU94 +++ mktemp ++ local LAST_ERR=/tmp/tmp.iZHBZTShva ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xulpqrJU94 ++ cat /tmp/tmp.iZHBZTShva ++ rm /tmp/tmp.xulpqrJU94 /tmp/tmp.iZHBZTShva ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.L7M0Vydy0v +++ mktemp ++ local LAST_ERR=/tmp/tmp.2pU4S1A13P ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.L7M0Vydy0v ++ cat /tmp/tmp.2pU4S1A13P ++ rm /tmp/tmp.L7M0Vydy0v /tmp/tmp.2pU4S1A13P ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vbgS3cNGeJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ws0u5q30JZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vbgS3cNGeJ ++ cat /tmp/tmp.Ws0u5q30JZ ++ rm /tmp/tmp.vbgS3cNGeJ /tmp/tmp.Ws0u5q30JZ ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.f2FDGF7Qnp +++ mktemp ++ local LAST_ERR=/tmp/tmp.26wiSrtz6x ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e 
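
# [editor's note] The polling above is wait_cluster_consistency: after a spec
# or secret change it waits until .status.state is "ready" and the pxc and
# proxy ready-counts match the requested sizes, giving up after 36 rounds of
# 20 seconds. A sketch reconstructed from the trace; the real helper resolves
# proxysql vs. haproxy via get_proxy_engine, hardcoded to proxysql here because
# that is what this cluster runs.
wait_cluster_consistency() {
    local cluster_name=$1
    local cluster_size=$2
    local proxy_size=${3:-$cluster_size}
    local i=0
    local max=36
    sleep 7                                  # let the operator observe the change
    until [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.state}') == "ready" \
          && $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.pxc.ready}') == "$cluster_size" \
          && $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.proxysql.ready}') == "$proxy_size" ]]; do
        echo 'waiting for cluster readyness'     # message spelled as in the log
        sleep 20
        [[ $i -ge $max ]] && return 1            # roughly 12 minutes before giving up
        let i+=1
    done
}
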
++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.f2FDGF7Qnp ++ cat /tmp/tmp.26wiSrtz6x ++ rm /tmp/tmp.f2FDGF7Qnp /tmp/tmp.26wiSrtz6x ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.wFBxsqeojM ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.mg5L1IAUhb +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.wFBxsqeojM +++++ cat /tmp/tmp.mg5L1IAUhb +++++ rm /tmp/tmp.wFBxsqeojM /tmp/tmp.mg5L1IAUhb +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.kcVzXodeV4 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.iuKMwrJwfd +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.kcVzXodeV4 +++++ cat /tmp/tmp.iuKMwrJwfd +++++ rm /tmp/tmp.kcVzXodeV4 /tmp/tmp.iuKMwrJwfd +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HtpJ1LVkFo +++ mktemp ++ local LAST_ERR=/tmp/tmp.9pMX0TUfwv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HtpJ1LVkFo ++ cat /tmp/tmp.9pMX0TUfwv ++ rm /tmp/tmp.HtpJ1LVkFo /tmp/tmp.9pMX0TUfwv ++ return 0 + [[ 2 == \2 ]] + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-3-80.sql ']' + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' 
-s /tmp/tmp.jmGBhW8nFf/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-3.sql /tmp/tmp.jmGBhW8nFf/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Dyi4GAbymW ++ mktemp + local LAST_ERR=/tmp/tmp.2GQeUDIfFT + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Dyi4GAbymW secret/my-cluster-secrets patched + cat /tmp/tmp.2GQeUDIfFT + rm /tmp/tmp.Dyi4GAbymW /tmp/tmp.2GQeUDIfFT + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.EEpvZvuSIE +++ mktemp ++ local LAST_ERR=/tmp/tmp.MFDqIZNFQl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EEpvZvuSIE ++ cat /tmp/tmp.MFDqIZNFQl ++ rm /tmp/tmp.EEpvZvuSIE /tmp/tmp.MFDqIZNFQl ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + retry=0 + is_password_updated monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep additional_password + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Bsxvk4CNoN +++ mktemp ++ local LAST_ERR=/tmp/tmp.jWIQ417eG5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Bsxvk4CNoN ++ cat /tmp/tmp.jWIQ417eG5 ++ rm /tmp/tmp.Bsxvk4CNoN /tmp/tmp.jWIQ417eG5 ++ return 0 + client_pod=pxc-client-65c795cbdf-g466m + wait_pod pxc-client-65c795cbdf-g466m + local pod=pxc-client-65c795cbdf-g466m + local max_retry=480 + local ns= ++ echo pxc-client-65c795cbdf-g466m ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-65c795cbdf-g466m condition met pxc-client-65c795cbdf-g466m.Ok + set +o xtrace + echo 'waiting for password update' waiting for password update + sleep 1 + let retry+=1 + [[ 1 -ge 240 ]] + is_password_updated monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 
'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep additional_password + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZpRzWaL5B1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.069rifPJQR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZpRzWaL5B1 ++ cat /tmp/tmp.069rifPJQR ++ rm /tmp/tmp.ZpRzWaL5B1 /tmp/tmp.069rifPJQR ++ return 0 + client_pod=pxc-client-65c795cbdf-g466m + wait_pod pxc-client-65c795cbdf-g466m + local pod=pxc-client-65c795cbdf-g466m + local max_retry=480 + local ns= ++ echo pxc-client-65c795cbdf-g466m ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-65c795cbdf-g466m condition met pxc-client-65c795cbdf-g466m.Ok + set +o xtrace {"additional_password": "*C1F414D9BAF378B656A849B31F9F8AF3125F558B"} + retry=0 + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rBf8kZZ8Pm +++ mktemp ++ local LAST_ERR=/tmp/tmp.VuXmmECdQT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rBf8kZZ8Pm ++ cat /tmp/tmp.VuXmmECdQT ++ rm /tmp/tmp.rBf8kZZ8Pm /tmp/tmp.VuXmmECdQT ++ return 0 + client_pod=pxc-client-65c795cbdf-g466m + wait_pod pxc-client-65c795cbdf-g466m + local pod=pxc-client-65c795cbdf-g466m + local max_retry=480 + local ns= ++ echo pxc-client-65c795cbdf-g466m ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-65c795cbdf-g466m condition met pxc-client-65c795cbdf-g466m.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 1 -ge 240 ]] [... nine identical propagation checks elided ([[ 2 -ge 240 ]] through [[ 10 -ge 240 ]]): each re-runs the same User_attributes query, matches nothing with grep NULL, prints "waiting for password propagation", and advances the retry counter ...] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kGnft4RU45 +++ mktemp ++ local LAST_ERR=/tmp/tmp.QEhevjhKZ1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kGnft4RU45 ++ cat /tmp/tmp.QEhevjhKZ1 ++ rm /tmp/tmp.kGnft4RU45 /tmp/tmp.QEhevjhKZ1 ++ return 0 + client_pod=pxc-client-65c795cbdf-g466m + wait_pod pxc-client-65c795cbdf-g466m + local pod=pxc-client-65c795cbdf-g466m + local max_retry=480 + local ns= ++ echo pxc-client-65c795cbdf-g466m ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace
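# The monitor-password checks in this stretch lean on MySQL 8.0 dual passwords:
# after a rotation that retains the current password (ALTER USER ... RETAIN
# CURRENT PASSWORD), the old hash is kept as an "additional_password" entry in
# mysql.user's User_attributes JSON -- visible in the {"additional_password":
# "*C1F4..."} output above -- and the column drops back to NULL once the old
# password is discarded. That is why is_password_updated greps for
# additional_password while is_old_password_discarded greps for NULL. A
# condensed sketch of the probe, assuming a client pod with the mysql CLI;
# CLIENT_POD and ROOT_PASS stand in for values the suite discovers at runtime.
user_attrs() {
    kubectl exec "$CLIENT_POD" -- mysql -h some-name-pxc -uroot -p"$ROOT_PASS" -NBe \
        "SELECT User_attributes FROM mysql.user WHERE user='monitor'"
}
user_attrs | grep -q additional_password && echo 'rotation started: old password retained'
user_attrs | grep -q NULL && echo 'rotation finished: old password discarded'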
pod/pxc-client-65c795cbdf-g466m condition met pxc-client-65c795cbdf-g466m.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 11 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GfpLJd1aSY +++ mktemp ++ local LAST_ERR=/tmp/tmp.2fWk2NwYpn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GfpLJd1aSY ++ cat /tmp/tmp.2fWk2NwYpn ++ rm /tmp/tmp.GfpLJd1aSY /tmp/tmp.2fWk2NwYpn ++ return 0 + client_pod=pxc-client-65c795cbdf-g466m + wait_pod pxc-client-65c795cbdf-g466m + local pod=pxc-client-65c795cbdf-g466m + local max_retry=480 + local ns= ++ echo pxc-client-65c795cbdf-g466m ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-65c795cbdf-g466m condition met pxc-client-65c795cbdf-g466m.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 12 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eoUUhBqfWF +++ mktemp ++ local LAST_ERR=/tmp/tmp.USEb8NFQcI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eoUUhBqfWF ++ cat /tmp/tmp.USEb8NFQcI ++ rm /tmp/tmp.eoUUhBqfWF /tmp/tmp.USEb8NFQcI ++ return 0 + client_pod=pxc-client-65c795cbdf-g466m + wait_pod pxc-client-65c795cbdf-g466m + local pod=pxc-client-65c795cbdf-g466m + local max_retry=480 + local ns= ++ echo pxc-client-65c795cbdf-g466m ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-65c795cbdf-g466m condition met pxc-client-65c795cbdf-g466m.Ok + set +o xtrace NULL + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc 
some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ap0tk69Cko +++ mktemp ++ local LAST_ERR=/tmp/tmp.RxFOpMBP3i ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ap0tk69Cko ++ cat /tmp/tmp.RxFOpMBP3i ++ rm /tmp/tmp.Ap0tk69Cko /tmp/tmp.RxFOpMBP3i ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pJUzlkQHa1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ahKViqvLXg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pJUzlkQHa1 ++ cat /tmp/tmp.ahKViqvLXg ++ rm /tmp/tmp.pJUzlkQHa1 /tmp/tmp.ahKViqvLXg ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.M1oFmdrfiV ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.GmPk5UgMyT +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.M1oFmdrfiV +++++ cat /tmp/tmp.GmPk5UgMyT +++++ rm /tmp/tmp.M1oFmdrfiV /tmp/tmp.GmPk5UgMyT +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Ty10kwG3iG ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.WBBbmB023o +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Ty10kwG3iG +++++ cat /tmp/tmp.WBBbmB023o +++++ rm /tmp/tmp.Ty10kwG3iG /tmp/tmp.WBBbmB023o +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dn4xazGjYa +++ mktemp ++ local LAST_ERR=/tmp/tmp.kaD3rW6Mc5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dn4xazGjYa ++ cat /tmp/tmp.kaD3rW6Mc5 ++ rm /tmp/tmp.dn4xazGjYa /tmp/tmp.kaD3rW6Mc5 ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 
'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.POv57Gna4b +++ mktemp ++ local LAST_ERR=/tmp/tmp.myp8fXRzaV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.POv57Gna4b ++ cat /tmp/tmp.myp8fXRzaV ++ rm /tmp/tmp.POv57Gna4b /tmp/tmp.myp8fXRzaV ++ return 0 + client_pod=pxc-client-65c795cbdf-g466m + wait_pod pxc-client-65c795cbdf-g466m + local pod=pxc-client-65c795cbdf-g466m + local max_retry=480 + local ns= ++ echo pxc-client-65c795cbdf-g466m ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-65c795cbdf-g466m condition met pxc-client-65c795cbdf-g466m.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.jmGBhW8nFf/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.jmGBhW8nFf/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Hk3etISqr3 ++ mktemp + local LAST_ERR=/tmp/tmp.UNVo75Wc1C + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Hk3etISqr3 secret/my-cluster-secrets patched + cat /tmp/tmp.UNVo75Wc1C + rm /tmp/tmp.Hk3etISqr3 /tmp/tmp.UNVo75Wc1C + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nqCJvugGwD +++ mktemp ++ local LAST_ERR=/tmp/tmp.KV30sNlWTJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nqCJvugGwD ++ cat /tmp/tmp.KV30sNlWTJ ++ rm /tmp/tmp.nqCJvugGwD /tmp/tmp.KV30sNlWTJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ab9dbxR61x +++ mktemp ++ local LAST_ERR=/tmp/tmp.ITWX0VLpDa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ab9dbxR61x ++ cat /tmp/tmp.ITWX0VLpDa ++ rm /tmp/tmp.ab9dbxR61x 
/tmp/tmp.ITWX0VLpDa ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.C41f6SKIdd +++ mktemp ++ local LAST_ERR=/tmp/tmp.M97v11ndus ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.C41f6SKIdd ++ cat /tmp/tmp.M97v11ndus ++ rm /tmp/tmp.C41f6SKIdd /tmp/tmp.M97v11ndus ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.iicjbBWsqP ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.dM7ElvKzU8 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.iicjbBWsqP +++++ cat /tmp/tmp.dM7ElvKzU8 +++++ rm /tmp/tmp.iicjbBWsqP /tmp/tmp.dM7ElvKzU8 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Lzt55o0Crm ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.IQobunUsz9 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Lzt55o0Crm +++++ cat /tmp/tmp.IQobunUsz9 +++++ rm /tmp/tmp.Lzt55o0Crm /tmp/tmp.IQobunUsz9 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WFIPtqJQC0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.piFOntREHy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WFIPtqJQC0 ++ cat /tmp/tmp.piFOntREHy ++ rm /tmp/tmp.WFIPtqJQC0 /tmp/tmp.piFOntREHy ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fb7qNxJu5V +++ mktemp ++ local LAST_ERR=/tmp/tmp.cMAxpPPCVf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 
'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fb7qNxJu5V ++ cat /tmp/tmp.cMAxpPPCVf ++ rm /tmp/tmp.fb7qNxJu5V /tmp/tmp.cMAxpPPCVf ++ return 0 + client_pod=pxc-client-65c795cbdf-g466m + wait_pod pxc-client-65c795cbdf-g466m + local pod=pxc-client-65c795cbdf-g466m + local max_retry=480 + local ns= ++ echo pxc-client-65c795cbdf-g466m ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-65c795cbdf-g466m condition met pxc-client-65c795cbdf-g466m.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.jmGBhW8nFf/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.jmGBhW8nFf/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.dmekmtXmBK ++ mktemp + local LAST_ERR=/tmp/tmp.cN34g2VG8s + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dmekmtXmBK perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.cN34g2VG8s + rm /tmp/tmp.dmekmtXmBK /tmp/tmp.cN34g2VG8s + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FOW5DF4R5x +++ mktemp ++ local LAST_ERR=/tmp/tmp.q9bk7A9h13 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FOW5DF4R5x ++ cat /tmp/tmp.q9bk7A9h13 ++ rm /tmp/tmp.FOW5DF4R5x /tmp/tmp.q9bk7A9h13 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 [... five identical status polls elided: each returns "initializing", prints "waiting for cluster readyness", sleeps 20s, and advances the counter through [[ 5 -ge 36 ]] ...] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vbn9cxpGnL +++ mktemp ++ local LAST_ERR=/tmp/tmp.IP6kfcFKmZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vbn9cxpGnL ++ cat /tmp/tmp.IP6kfcFKmZ ++ rm /tmp/tmp.vbn9cxpGnL /tmp/tmp.IP6kfcFKmZ ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wAlkf5bmoe +++ mktemp ++ local LAST_ERR=/tmp/tmp.7jf9sJJtDH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wAlkf5bmoe ++ cat /tmp/tmp.7jf9sJJtDH ++ rm /tmp/tmp.wAlkf5bmoe /tmp/tmp.7jf9sJJtDH ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.3UlBAUOabX ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.4ZpM5DgSL2 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0
']' +++++ break +++++ cat /tmp/tmp.3UlBAUOabX +++++ cat /tmp/tmp.4ZpM5DgSL2 +++++ rm /tmp/tmp.3UlBAUOabX /tmp/tmp.4ZpM5DgSL2 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.vvVgKXIW41 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.i4gzM1bn2d +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.vvVgKXIW41 +++++ cat /tmp/tmp.i4gzM1bn2d +++++ rm /tmp/tmp.vvVgKXIW41 /tmp/tmp.i4gzM1bn2d +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Lx994loaNN +++ mktemp ++ local LAST_ERR=/tmp/tmp.nNKG1ojI8T ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Lx994loaNN ++ cat /tmp/tmp.nNKG1ojI8T ++ rm /tmp/tmp.Lx994loaNN /tmp/tmp.nNKG1ojI8T ++ return 0 + [[ 2 == \2 ]] + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.2BBTJyPlvO ++ mktemp + local LAST_ERR=/tmp/tmp.5MLKQ5K04a + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2BBTJyPlvO secret/my-cluster-secrets-2 patched + cat /tmp/tmp.5MLKQ5K04a + rm /tmp/tmp.2BBTJyPlvO /tmp/tmp.5MLKQ5K04a + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hIMnasLFCt +++ mktemp ++ local LAST_ERR=/tmp/tmp.yLrKYQzxtp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hIMnasLFCt ++ cat /tmp/tmp.yLrKYQzxtp ++ rm /tmp/tmp.hIMnasLFCt /tmp/tmp.yLrKYQzxtp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ftbsKhF93t +++ mktemp ++ local LAST_ERR=/tmp/tmp.85cyJlPdeu ++ local 
exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ftbsKhF93t ++ cat /tmp/tmp.85cyJlPdeu ++ rm /tmp/tmp.ftbsKhF93t /tmp/tmp.85cyJlPdeu ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xDo0avfSzP +++ mktemp ++ local LAST_ERR=/tmp/tmp.evDJnQRYvX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xDo0avfSzP ++ cat /tmp/tmp.evDJnQRYvX ++ rm /tmp/tmp.xDo0avfSzP /tmp/tmp.evDJnQRYvX ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.1Ox6cyYZkF ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.PnGF4kx2du +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.1Ox6cyYZkF +++++ cat /tmp/tmp.PnGF4kx2du +++++ rm /tmp/tmp.1Ox6cyYZkF /tmp/tmp.PnGF4kx2du +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.88Pqm7pNSc ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.oG8IwyT1xW +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.88Pqm7pNSc +++++ cat /tmp/tmp.oG8IwyT1xW +++++ rm /tmp/tmp.88Pqm7pNSc /tmp/tmp.oG8IwyT1xW +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X7t3qkeCdS +++ mktemp ++ local LAST_ERR=/tmp/tmp.GTq8NsiWm2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.X7t3qkeCdS ++ cat /tmp/tmp.GTq8NsiWm2 ++ rm /tmp/tmp.X7t3qkeCdS /tmp/tmp.GTq8NsiWm2 ++ return 0 + [[ 2 == \2 ]] + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 
'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lJGXJ8PM1M +++ mktemp ++ local LAST_ERR=/tmp/tmp.lIcy0JoigL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lJGXJ8PM1M ++ cat /tmp/tmp.lIcy0JoigL ++ rm /tmp/tmp.lJGXJ8PM1M /tmp/tmp.lIcy0JoigL ++ return 0 + client_pod=pxc-client-65c795cbdf-g466m + wait_pod pxc-client-65c795cbdf-g466m + local pod=pxc-client-65c795cbdf-g466m + local max_retry=480 + local ns= ++ echo pxc-client-65c795cbdf-g466m ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-65c795cbdf-g466m condition met pxc-client-65c795cbdf-g466m.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.jmGBhW8nFf/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.jmGBhW8nFf/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.2TrvPO4TgK +++ mktemp ++ local LAST_ERR=/tmp/tmp.GOe7dcj78d ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2TrvPO4TgK ++ cat /tmp/tmp.GOe7dcj78d ++ rm /tmp/tmp.2TrvPO4TgK /tmp/tmp.GOe7dcj78d ++ return 0 + newpass='U^acqlz[9,k{PNM@Y6' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''U^acqlz[9,k{PNM@Y6'\'';' '-h some-name-pxc -uroot -p'\''U^acqlz[9,k{PNM@Y6'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''U^acqlz[9,k{PNM@Y6'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''U^acqlz[9,k{PNM@Y6'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.O4zXmfVmFL +++ mktemp ++ local LAST_ERR=/tmp/tmp.gHvoahLNYb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.O4zXmfVmFL ++ cat /tmp/tmp.gHvoahLNYb ++ rm /tmp/tmp.O4zXmfVmFL /tmp/tmp.gHvoahLNYb ++ return 0 + client_pod=pxc-client-65c795cbdf-g466m + wait_pod pxc-client-65c795cbdf-g466m + local pod=pxc-client-65c795cbdf-g466m + local max_retry=480 + local ns= ++ echo pxc-client-65c795cbdf-g466m ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-65c795cbdf-g466m condition met pxc-client-65c795cbdf-g466m.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''U^acqlz[9,k{PNM@Y6'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''U^acqlz[9,k{PNM@Y6'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-4.sql + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''U^acqlz[9,k{PNM@Y6'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''U^acqlz[9,k{PNM@Y6'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tZ4NZofT2q +++ mktemp ++ local LAST_ERR=/tmp/tmp.zXjEsJJZbE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tZ4NZofT2q ++ cat /tmp/tmp.zXjEsJJZbE ++ rm /tmp/tmp.tZ4NZofT2q /tmp/tmp.zXjEsJJZbE ++ return 0 + client_pod=pxc-client-65c795cbdf-g466m + wait_pod pxc-client-65c795cbdf-g466m + local pod=pxc-client-65c795cbdf-g466m + local max_retry=480 + local ns= ++ echo pxc-client-65c795cbdf-g466m ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-65c795cbdf-g466m condition met pxc-client-65c795cbdf-g466m.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.jmGBhW8nFf/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.jmGBhW8nFf/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.Df4M7cV6hM +++ mktemp ++ local LAST_ERR=/tmp/tmp.j1TskpLLAN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Df4M7cV6hM ++ cat /tmp/tmp.j1TskpLLAN ++ rm /tmp/tmp.Df4M7cV6hM /tmp/tmp.j1TskpLLAN ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.VuwXd3mWhV ++ mktemp + local LAST_ERR=/tmp/tmp.KfevJPVXqq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.VuwXd3mWhV secret/my-cluster-secrets-2 configured + cat /tmp/tmp.KfevJPVXqq Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
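# Two small secret helpers do most of the work in this part of the test:
# patch_secret writes a base64-encoded value into a single key of a Secret,
# and getSecretData reads one back. (The kubectl warning above is benign: the
# secret was originally created without --save-config, so the
# last-applied-configuration annotation is patched in automatically on this
# first apply.) Sketches of both, assuming only kubectl and coreutils; the
# bodies are inferred from the trace rather than copied from the repo.
patch_secret() {
    local secret=$1 key=$2 value=$3    # value must already be base64-encoded
    kubectl patch secret "$secret" -p="{\"data\":{\"$key\": \"$value\"}}"
}
getSecretData() {
    local secretName=$1 dataKey=$2
    kubectl get "secrets/$secretName" "--template={{.data.$dataKey}}" | base64 --decode
}
# usage mirroring the trace:
#   patch_secret my-cluster-secrets-2 operator "$(echo -n test-password2 | base64)"
#   newpass=$(getSecretData my-cluster-secrets-2 root)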
+ rm /tmp/tmp.VuwXd3mWhV /tmp/tmp.KfevJPVXqq + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4HUSTOgIF0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.WKeUgB7vWA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4HUSTOgIF0 ++ cat /tmp/tmp.WKeUgB7vWA ++ rm /tmp/tmp.4HUSTOgIF0 /tmp/tmp.WKeUgB7vWA ++ return 0 + client_pod=pxc-client-65c795cbdf-g466m + wait_pod pxc-client-65c795cbdf-g466m + local pod=pxc-client-65c795cbdf-g466m + local max_retry=480 + local ns= ++ echo pxc-client-65c795cbdf-g466m ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-65c795cbdf-g466m condition met pxc-client-65c795cbdf-g466m.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.jmGBhW8nFf/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.jmGBhW8nFf/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/conf/some-name.yml + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.73g4EaEHjm + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' ++ mktemp + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_ERR=/tmp/tmp.j43yOo4nZx + local exit_status=0 ++ seq 0 2 + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + for i in '$(seq 0 2)' + set +e + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1774-70b9684b#' + kubectl apply -f - + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-9717~ + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.73g4EaEHjm perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.j43yOo4nZx + rm /tmp/tmp.73g4EaEHjm /tmp/tmp.j43yOo4nZx + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IaQt7NLhHu +++ mktemp ++ local LAST_ERR=/tmp/tmp.YoNSDh69EB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IaQt7NLhHu ++ cat /tmp/tmp.YoNSDh69EB ++ rm /tmp/tmp.IaQt7NLhHu /tmp/tmp.YoNSDh69EB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ez6HQ952Ad +++ mktemp ++ local LAST_ERR=/tmp/tmp.i59UcevACi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ez6HQ952Ad ++ cat /tmp/tmp.i59UcevACi ++ rm /tmp/tmp.ez6HQ952Ad /tmp/tmp.i59UcevACi ++ return 0 + [[ 
initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fflYkKanTQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.2aOZuIAAas ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fflYkKanTQ ++ cat /tmp/tmp.2aOZuIAAas ++ rm /tmp/tmp.fflYkKanTQ /tmp/tmp.2aOZuIAAas ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AymDCAGACs +++ mktemp ++ local LAST_ERR=/tmp/tmp.51fcwtyS0L ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AymDCAGACs ++ cat /tmp/tmp.51fcwtyS0L ++ rm /tmp/tmp.AymDCAGACs /tmp/tmp.51fcwtyS0L ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dsFRYhdGg3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.NOI9TmqK3Q ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dsFRYhdGg3 ++ cat /tmp/tmp.NOI9TmqK3Q ++ rm /tmp/tmp.dsFRYhdGg3 /tmp/tmp.NOI9TmqK3Q ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kkzLS46cbE +++ mktemp ++ local LAST_ERR=/tmp/tmp.nH5NUkttgc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kkzLS46cbE ++ cat /tmp/tmp.nH5NUkttgc ++ rm /tmp/tmp.kkzLS46cbE /tmp/tmp.nH5NUkttgc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OD2KgOOBfV +++ mktemp ++ local LAST_ERR=/tmp/tmp.pm5DgE4piX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OD2KgOOBfV ++ cat /tmp/tmp.pm5DgE4piX ++ rm /tmp/tmp.OD2KgOOBfV /tmp/tmp.pm5DgE4piX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dwRPR0ryRg +++ mktemp ++ local LAST_ERR=/tmp/tmp.5DMOFqg8wk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dwRPR0ryRg ++ cat /tmp/tmp.5DMOFqg8wk ++ rm 
/tmp/tmp.dwRPR0ryRg /tmp/tmp.5DMOFqg8wk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ojuyrJbiLB +++ mktemp ++ local LAST_ERR=/tmp/tmp.D81Uv6pyxr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ojuyrJbiLB ++ cat /tmp/tmp.D81Uv6pyxr ++ rm /tmp/tmp.ojuyrJbiLB /tmp/tmp.D81Uv6pyxr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 8 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RsNvvrPAMa +++ mktemp ++ local LAST_ERR=/tmp/tmp.6R8M2XzZnZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RsNvvrPAMa ++ cat /tmp/tmp.6R8M2XzZnZ ++ rm /tmp/tmp.RsNvvrPAMa /tmp/tmp.6R8M2XzZnZ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 9 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rY65qykxqj +++ mktemp ++ local LAST_ERR=/tmp/tmp.piF3BU3VRT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rY65qykxqj ++ cat /tmp/tmp.piF3BU3VRT ++ rm /tmp/tmp.rY65qykxqj /tmp/tmp.piF3BU3VRT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 10 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.epWULhK5DG +++ mktemp ++ local LAST_ERR=/tmp/tmp.4Y0GTv13Tb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.epWULhK5DG ++ cat /tmp/tmp.4Y0GTv13Tb ++ rm /tmp/tmp.epWULhK5DG /tmp/tmp.4Y0GTv13Tb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 11 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wXEIKdUqBw +++ mktemp ++ local LAST_ERR=/tmp/tmp.yzgGEn8lyS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wXEIKdUqBw ++ cat /tmp/tmp.yzgGEn8lyS ++ rm /tmp/tmp.wXEIKdUqBw /tmp/tmp.yzgGEn8lyS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 12 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ueXyZG730Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.U1pJgU4Hqs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ 
cat /tmp/tmp.ueXyZG730Y ++ cat /tmp/tmp.U1pJgU4Hqs ++ rm /tmp/tmp.ueXyZG730Y /tmp/tmp.U1pJgU4Hqs ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.usend5SBle +++ mktemp ++ local LAST_ERR=/tmp/tmp.tLqJIK8Umw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.usend5SBle ++ cat /tmp/tmp.tLqJIK8Umw ++ rm /tmp/tmp.usend5SBle /tmp/tmp.tLqJIK8Umw ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ny3Rr0JJYs ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.MjJGd2dTSc +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ny3Rr0JJYs +++++ cat /tmp/tmp.MjJGd2dTSc +++++ rm /tmp/tmp.ny3Rr0JJYs /tmp/tmp.MjJGd2dTSc +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kPHtOnVD8N +++ mktemp ++ local LAST_ERR=/tmp/tmp.MQDb1iCcqM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kPHtOnVD8N ++ cat /tmp/tmp.MQDb1iCcqM ++ rm /tmp/tmp.kPHtOnVD8N /tmp/tmp.MQDb1iCcqM ++ return 0 + [[ 3 == \3 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BGnBA8LGZh +++ mktemp ++ local LAST_ERR=/tmp/tmp.6zpn7XQAlR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BGnBA8LGZh ++ cat /tmp/tmp.6zpn7XQAlR ++ rm /tmp/tmp.BGnBA8LGZh /tmp/tmp.6zpn7XQAlR ++ return 0 + current_generation=1 + [[ 1 != \1 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.SdsFL4sedL ++ mktemp + local LAST_ERR=/tmp/tmp.R5SIBXVKn5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SdsFL4sedL secret/my-cluster-secrets patched + cat /tmp/tmp.R5SIBXVKn5 + rm /tmp/tmp.SdsFL4sedL /tmp/tmp.R5SIBXVKn5 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace 
----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GouPyfw9y1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.cRHUnA2RR0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GouPyfw9y1 ++ cat /tmp/tmp.cRHUnA2RR0 ++ rm /tmp/tmp.GouPyfw9y1 /tmp/tmp.cRHUnA2RR0 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eyjbBNwfh5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.GnZ8pZtI0N ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eyjbBNwfh5 ++ cat /tmp/tmp.GnZ8pZtI0N ++ rm /tmp/tmp.eyjbBNwfh5 /tmp/tmp.GnZ8pZtI0N ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.kMBNb0rF6g ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.V6uKx5jr29 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.kMBNb0rF6g +++++ cat /tmp/tmp.V6uKx5jr29 +++++ rm /tmp/tmp.kMBNb0rF6g /tmp/tmp.V6uKx5jr29 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e5W5OomL0H +++ mktemp ++ local LAST_ERR=/tmp/tmp.r9X6AIavrC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.e5W5OomL0H ++ cat /tmp/tmp.r9X6AIavrC ++ rm /tmp/tmp.e5W5OomL0H /tmp/tmp.r9X6AIavrC ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-3-80.sql ']' + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.npVIyUmYUy +++ mktemp ++ local LAST_ERR=/tmp/tmp.8Qc1A2eGRy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 
'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.npVIyUmYUy ++ cat /tmp/tmp.8Qc1A2eGRy ++ rm /tmp/tmp.npVIyUmYUy /tmp/tmp.8Qc1A2eGRy ++ return 0 + client_pod=pxc-client-65c795cbdf-g466m + wait_pod pxc-client-65c795cbdf-g466m + local pod=pxc-client-65c795cbdf-g466m + local max_retry=480 + local ns= ++ echo pxc-client-65c795cbdf-g466m ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-65c795cbdf-g466m condition met pxc-client-65c795cbdf-g466m.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.jmGBhW8nFf/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1774/e2e-tests/users/compare/select-3.sql /tmp/tmp.jmGBhW8nFf/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ymKDU7Iq2Q +++ mktemp ++ local LAST_ERR=/tmp/tmp.CfbYmZZOZA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ymKDU7Iq2Q ++ cat /tmp/tmp.CfbYmZZOZA ++ rm /tmp/tmp.ymKDU7Iq2Q /tmp/tmp.CfbYmZZOZA ++ return 0 + current_generation=1 + [[ 1 != \1 ]] + destroy users-9717 + local namespace=users-9717 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + grep -v 'get backup status: Job.batch' + tee /tmp/tmp.jmGBhW8nFf/operator.log + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator + sort -u +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.ENDqcsEeBZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.1LcGSTkes9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ENDqcsEeBZ ++ cat /tmp/tmp.1LcGSTkes9 ++ rm /tmp/tmp.ENDqcsEeBZ /tmp/tmp.1LcGSTkes9 ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-56bc5d9fb9-cf2qp ++ mktemp + local LAST_OUT=/tmp/tmp.znlLPR9tkQ ++ mktemp + local LAST_ERR=/tmp/tmp.rvoFs1SQ4S + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-56bc5d9fb9-cf2qp + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.znlLPR9tkQ + cat /tmp/tmp.rvoFs1SQ4S + rm /tmp/tmp.znlLPR9tkQ /tmp/tmp.rvoFs1SQ4S + return 0 2024-08-05T06:27:16.537Z INFO setup Manager starting up 
{"gitCommit": "70b9684b9628ddfcc3dab7c6787cbf6d29753b3d", "gitBranch": "PR-1774-70b9684b", "buildTime": "2024-08-05T05:56:58Z", "goVersion": "go1.22.5", "os": "linux", "arch": "amd64"} 2024-08-05T06:27:16.537Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.27.16-gke.1051000"} 2024-08-05T06:27:16.610Z INFO setup Registering Components. 2024-08-05T06:27:20.510Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2024-08-05T06:27:20.518Z INFO controller-runtime.metrics Starting metrics server 2024-08-05T06:27:20.518Z INFO setup Starting the Cmd. 2024-08-05T06:27:20.519Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2024-08-05T06:27:20.519Z INFO controller-runtime.webhook Starting webhook server 2024-08-05T06:27:20.519Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2024-08-05T06:27:20.520Z INFO controller-runtime.certwatcher Starting certificate watcher 2024-08-05T06:27:20.520Z INFO controller-runtime.certwatcher Updated current TLS certificate 2024-08-05T06:27:20.520Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2024-08-05T06:27:20.620Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 2024-08-05T06:27:20.635Z DEBUG events percona-xtradb-cluster-operator-56bc5d9fb9-cf2qp_c4153bbe-bdd8-4bb2-a91e-40aadd1710e2 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"3770ea1d-ff36-4395-8d8e-d23e45d80711","apiVersion":"coordination.k8s.io/v1","resourceVersion":"14419"}, "reason": "LeaderElection"} 2024-08-05T06:27:20.635Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2024-08-05T06:27:20.636Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2024-08-05T06:27:20.636Z INFO Starting Controller {"controller": "pxc-controller"} 2024-08-05T06:27:20.636Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2024-08-05T06:27:20.636Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2024-08-05T06:27:20.636Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2024-08-05T06:27:20.636Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2024-08-05T06:27:20.742Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2024-08-05T06:27:20.748Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2024-08-05T06:27:20.748Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2024-08-05T06:27:45.110Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "a7a7dcb1-731f-4776-9a48-e5d0e8cac035", "version": "1.15.0"} 2024-08-05T06:27:45.553Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "a7a7dcb1-731f-4776-9a48-e5d0e8cac035", "object": "some-name-pxc"} 2024-08-05T06:27:45.697Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "a7a7dcb1-731f-4776-9a48-e5d0e8cac035", "object": "some-name-pxc"} 2024-08-05T06:27:45.880Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": 
"a7a7dcb1-731f-4776-9a48-e5d0e8cac035", "object": "some-name-proxysql"} 2024-08-05T06:27:45.920Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "a7a7dcb1-731f-4776-9a48-e5d0e8cac035", "object": "some-name-pxc"} 2024-08-05T06:27:46.078Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "a7a7dcb1-731f-4776-9a48-e5d0e8cac035", "object": "some-name-pxc-unready"} 2024-08-05T06:27:46.270Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "a7a7dcb1-731f-4776-9a48-e5d0e8cac035", "object": "some-name-proxysql"} 2024-08-05T06:27:46.418Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "a7a7dcb1-731f-4776-9a48-e5d0e8cac035", "object": "some-name-proxysql"} 2024-08-05T06:27:46.615Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "a7a7dcb1-731f-4776-9a48-e5d0e8cac035", "object": "some-name-proxysql-unready"} 2024-08-05T06:29:03.453Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "b9b19ff5-9796-4d25-a26b-7221026c2ec2", "user": "operator"} 2024-08-05T06:29:03.484Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "b9b19ff5-9796-4d25-a26b-7221026c2ec2", "user": "monitor"} 2024-08-05T06:29:03.572Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "b9b19ff5-9796-4d25-a26b-7221026c2ec2"} 2024-08-05T06:29:03.601Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "b9b19ff5-9796-4d25-a26b-7221026c2ec2"} 2024-08-05T06:29:03.632Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "b9b19ff5-9796-4d25-a26b-7221026c2ec2", "user": "xtrabackup"} 2024-08-05T06:29:03.670Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "b9b19ff5-9796-4d25-a26b-7221026c2ec2"} 2024-08-05T06:29:03.703Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "b9b19ff5-9796-4d25-a26b-7221026c2ec2", "user": "replication"} 2024-08-05T06:29:03.732Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "b9b19ff5-9796-4d25-a26b-7221026c2ec2", "err": "get primary pxc pod: not found"} 2024-08-05T06:29:08.426Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "db618d74-bd8a-40c3-9057-8ca487c3ad66", "err": "get primary pxc pod: not found"} 2024-08-05T06:29:13.549Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "06d27d1e-8d6d-46fb-977b-8e186865b933", "err": "get primary pxc pod: not found"} 2024-08-05T06:29:18.682Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "80829bc0-f325-4b39-8071-5030ac5f4107", "err": "get 
primary pxc pod: not found"} 2024-08-05T06:31:23.695Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "e4a12752-08b2-4edd-bca1-fa7691066c6e", "user": "root"} 2024-08-05T06:31:23.867Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "e4a12752-08b2-4edd-bca1-fa7691066c6e", "new version": "8.0.36-28.1"} 2024-08-05T06:31:26.929Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "e4a12752-08b2-4edd-bca1-fa7691066c6e"} 2024-08-05T06:31:31.865Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "d4e903c0-fc25-4d36-a4d3-5adfd6460358"} 2024-08-05T06:31:37.159Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "6058ee65-2b98-41c7-a15a-8909c71a794e"} 2024-08-05T06:31:42.680Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "bbeec532-2ceb-4142-8492-397a23efadc8"} 2024-08-05T06:31:47.741Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "196c62f4-9102-4075-9f79-5aa37e2ef089"} 2024-08-05T06:31:52.995Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "dd318d32-8afd-49d5-9046-d39cb72057e0"} 2024-08-05T06:31:58.595Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "51e28c59-d7b7-498e-9f5a-a6db3911ce68"} 2024-08-05T06:32:03.560Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "ca4c0061-ee60-460e-853c-f3c87b900b9c"} 2024-08-05T06:32:09.271Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "4427e048-7cd2-4341-a794-82c498d23c96"} 2024-08-05T06:32:14.464Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3f2026ed-8e4a-46ed-b2bd-2a4fb355bb62"} 2024-08-05T06:32:19.457Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3d085c12-47a3-47b3-a0bd-0cb6323b4672"} 2024-08-05T06:32:24.778Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "1d47f318-4753-4ef4-a02b-02a442dec47b"} 2024-08-05T06:32:27.070Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "83959d1c-f8a7-4487-8207-15b9b29a8713", "user": "root"} 2024-08-05T06:32:27.109Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "83959d1c-f8a7-4487-8207-15b9b29a8713", "user": "root"} 2024-08-05T06:32:27.123Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "83959d1c-f8a7-4487-8207-15b9b29a8713", "secret": "some-name-mysql-init", "user": "root"} 2024-08-05T06:32:31.997Z DEBUG PXC users 
synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "83959d1c-f8a7-4487-8207-15b9b29a8713"} 2024-08-05T06:32:32.009Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "83959d1c-f8a7-4487-8207-15b9b29a8713", "user": "root"} 2024-08-05T06:32:32.046Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "83959d1c-f8a7-4487-8207-15b9b29a8713", "user": "root"} 2024-08-05T06:32:35.379Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "83959d1c-f8a7-4487-8207-15b9b29a8713"} 2024-08-05T06:32:40.659Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "cbf34868-24a4-4155-a112-3a7d4fd44296"} 2024-08-05T06:32:43.105Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "550433f6-3c16-458a-abcb-5ea2c44a4aef", "object": "some-name-proxysql"} 2024-08-05T06:32:45.847Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "64297dfb-3503-437b-88f7-d3351f377c54"} 2024-08-05T06:33:07.848Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "2ff211e7-17ed-4fc8-90e8-5e0ec1574b91", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1240\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-08-05T06:33:08.891Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "c5a853e2-25f0-4bef-b1a0-19fcc781b08d", "user": "proxyadmin"} 2024-08-05T06:33:08.891Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "c5a853e2-25f0-4bef-b1a0-19fcc781b08d", "user": "proxyadmin"} 2024-08-05T06:33:08.957Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "c5a853e2-25f0-4bef-b1a0-19fcc781b08d", "user": "proxyadmin"} 2024-08-05T06:33:08.970Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "c5a853e2-25f0-4bef-b1a0-19fcc781b08d", "user": "proxyadmin"} 2024-08-05T06:33:08.970Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "c5a853e2-25f0-4bef-b1a0-19fcc781b08d", "last-applied-secret": 
"1a55867d0f4fa0d0643653b2eb57b2f690315d8d843a10599c9839c2a779a95f"} 2024-08-05T06:33:09.001Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "c5a853e2-25f0-4bef-b1a0-19fcc781b08d", "object": "some-name-proxysql"} 2024-08-05T06:33:09.247Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "c5a853e2-25f0-4bef-b1a0-19fcc781b08d", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1240\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-08-05T06:33:56.888Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "de847adb-28f9-49da-8fab-530939825403"} 2024-08-05T06:34:02.263Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "c7f389cd-afe1-482a-b575-6991b1ee64c1"} 2024-08-05T06:34:07.459Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "f476b391-cd1b-4d13-a55f-15f537425c73"} 2024-08-05T06:34:12.620Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "c8e29e44-171d-4167-8015-55baa73d61cf"} 2024-08-05T06:34:17.551Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "a176a99b-e3cf-42cc-bba6-ad77a2bde891", "object": "some-name-proxysql"} 2024-08-05T06:34:17.755Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "ffed0ac2-643a-436e-b598-6600ba1821f3", "error": "exec syncusers: command terminated with exit code 137 / / ", "errorVerbose": "exec syncusers: command terminated with exit code 137 / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1240\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 
2024-08-05T06:34:21.811Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "d3fe5b2c-f5cd-48b2-bd32-cc43921c73c4", "user": "xtrabackup"} 2024-08-05T06:34:21.838Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "d3fe5b2c-f5cd-48b2-bd32-cc43921c73c4", "user": "xtrabackup"} 2024-08-05T06:34:21.847Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "d3fe5b2c-f5cd-48b2-bd32-cc43921c73c4", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-08-05T06:34:21.856Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "d3fe5b2c-f5cd-48b2-bd32-cc43921c73c4", "user": "xtrabackup"} 2024-08-05T06:34:21.880Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "d3fe5b2c-f5cd-48b2-bd32-cc43921c73c4", "user": "xtrabackup"} 2024-08-05T06:34:21.893Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "d3fe5b2c-f5cd-48b2-bd32-cc43921c73c4", "last-applied-secret": "d11bcc70ca53ea410cd455e13d82e7225917b0861d9b677ed34c02c5f6fbaf1b"} 2024-08-05T06:34:21.932Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "d3fe5b2c-f5cd-48b2-bd32-cc43921c73c4", "object": "some-name-pxc"} 2024-08-05T06:34:27.225Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "d3fe5b2c-f5cd-48b2-bd32-cc43921c73c4"} 2024-08-05T06:35:16.351Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "e1a60bea-f40d-4846-9f59-2ead2f40dc73", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-9717 on 10.47.64.10:53: no such host"} 2024-08-05T06:35:21.570Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "75de73d5-e884-4fb2-99bc-a220c159e95f", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.179.17.18:33062: connect: connection refused"} 2024-08-05T06:35:58.254Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "8fa0b6a5-848d-4355-b02d-29c104045f8c", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-9717 on 10.47.64.10:53: no such host"} 2024-08-05T06:35:58.533Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "1696c0e2-5bbf-4013-af8f-16064fa5c6f3", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-9717 on 10.47.64.10:53: no such host"} 2024-08-05T06:36:03.538Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "e205a769-cf54-4cb0-abb0-1221627807cb", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-9717 on 10.47.64.10:53: no such host"} 
2024-08-05T06:36:08.742Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "4d922d6f-83cb-49ce-80c6-babfba013e1b", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-9717 on 10.47.64.10:53: no such host"} 2024-08-05T06:36:29.263Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "44b5f496-3ebc-4798-943f-46a60e418cd4", "primary name": "some-name-pxc-0.some-name-pxc.users-9717.svc.cluster.local"} 2024-08-05T06:36:48.269Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "bd00075a-4555-48c1-a05f-aee9862d52c1"} 2024-08-05T06:36:53.372Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "d1a41ad3-93b7-44f5-bb7c-3ec5646d29ae"} 2024-08-05T06:36:58.675Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "342ecd34-f230-4f28-b130-824d98425b82"} 2024-08-05T06:37:03.860Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "cef3732b-a270-4f7f-b4c4-ef95a32a099a"} 2024-08-05T06:37:09.043Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3db2f44d-f019-4d8b-944c-909cf8055909"} 2024-08-05T06:37:11.047Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "023f8fef-2081-4f84-87db-c8e2873252e4", "user": "monitor"} 2024-08-05T06:37:11.071Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "023f8fef-2081-4f84-87db-c8e2873252e4", "user": "monitor"} 2024-08-05T06:37:11.079Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "023f8fef-2081-4f84-87db-c8e2873252e4", "secret": "some-name-mysql-init", "user": "monitor"} 2024-08-05T06:37:11.116Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "023f8fef-2081-4f84-87db-c8e2873252e4", "user": "monitor"} 2024-08-05T06:37:11.127Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "023f8fef-2081-4f84-87db-c8e2873252e4", "user": "monitor"} 2024-08-05T06:37:11.272Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "023f8fef-2081-4f84-87db-c8e2873252e4", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-08-05T06:37:11.591Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "023f8fef-2081-4f84-87db-c8e2873252e4", "object": "some-name-proxysql"} 2024-08-05T06:37:14.395Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "023f8fef-2081-4f84-87db-c8e2873252e4", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": 
"exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1240\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-08-05T06:37:46.988Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "cedcb807-1fab-4d7e-a962-12cc512e4fe8", "user": "monitor"} 2024-08-05T06:37:50.534Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "cedcb807-1fab-4d7e-a962-12cc512e4fe8"} 2024-08-05T06:37:51.926Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "e4ac7532-8265-4113-9ee2-1a3d0ceb7d89", "user": "monitor"} 2024-08-05T06:37:55.427Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "e4ac7532-8265-4113-9ee2-1a3d0ceb7d89"} 2024-08-05T06:37:57.298Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "7bae4c9f-e9e8-46a9-b372-b1a95e7be401", "user": "monitor"} 2024-08-05T06:38:00.935Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "7bae4c9f-e9e8-46a9-b372-b1a95e7be401"} 2024-08-05T06:38:02.755Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "a4c6ac94-733c-4ff5-826c-a1a67a0ab279", "user": "monitor"} 2024-08-05T06:38:06.117Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "a4c6ac94-733c-4ff5-826c-a1a67a0ab279"} 2024-08-05T06:38:08.070Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "19db9b2a-79f1-4c13-8ec6-ac1fa48cb8a8", "user": "monitor"} 2024-08-05T06:38:11.409Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "19db9b2a-79f1-4c13-8ec6-ac1fa48cb8a8"} 2024-08-05T06:38:13.386Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "8c40639c-8a9a-45c3-b861-be0dd529c527", "user": "monitor"} 2024-08-05T06:38:13.627Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "8c40639c-8a9a-45c3-b861-be0dd529c527", "user": "monitor"} 2024-08-05T06:38:13.646Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "8c40639c-8a9a-45c3-b861-be0dd529c527", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-08-05T06:38:16.925Z DEBUG PXC users synced with ProxySQL {"controller": 
"pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "8c40639c-8a9a-45c3-b861-be0dd529c527"} 2024-08-05T06:38:22.219Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "c93b65e5-c23f-429a-8397-4354118aeddb"} 2024-08-05T06:38:28.447Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "5b15f672-de29-4db4-9b8e-b4c947a6dc81"} 2024-08-05T06:38:33.153Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "22c4c46c-6add-4ba0-8658-bcab183080d7"} 2024-08-05T06:38:38.166Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "6981d667-d217-4c75-8b55-f961da9396bc"} 2024-08-05T06:38:40.232Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "50e72344-abb6-4e8f-b53e-cac98cc9c84e", "user": "operator"} 2024-08-05T06:38:40.256Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "50e72344-abb6-4e8f-b53e-cac98cc9c84e", "user": "operator"} 2024-08-05T06:38:40.266Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "50e72344-abb6-4e8f-b53e-cac98cc9c84e", "secret": "some-name-mysql-init", "user": "operator"} 2024-08-05T06:38:40.274Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "50e72344-abb6-4e8f-b53e-cac98cc9c84e", "user": "operator"} 2024-08-05T06:38:40.300Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "50e72344-abb6-4e8f-b53e-cac98cc9c84e", "user": "operator"} 2024-08-05T06:38:40.334Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "50e72344-abb6-4e8f-b53e-cac98cc9c84e", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-08-05T06:38:40.369Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "50e72344-abb6-4e8f-b53e-cac98cc9c84e", "object": "some-name-proxysql"} 2024-08-05T06:38:41.498Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "50e72344-abb6-4e8f-b53e-cac98cc9c84e", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9717.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9717.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9717.svc.c' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9717.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 
'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9717.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9717.svc.c' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1240\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-08-05T06:39:20.053Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "da0194a4-e444-4a55-ad4e-ed4bdec2625b"} 2024-08-05T06:39:28.751Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "de8c26f9-6f9d-4514-83ad-5fd337cf7f48"} 2024-08-05T06:39:33.732Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "4c1a2f79-763e-4cde-aa99-55addc602be0"} 2024-08-05T06:39:38.180Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "secrets": "my-cluster-secrets-2"} 2024-08-05T06:39:38.191Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "user": "root"} 2024-08-05T06:39:38.226Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "user": "root"} 2024-08-05T06:39:38.235Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "secret": "some-name-mysql-init", "user": "root"} 2024-08-05T06:39:40.317Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "bbecd500-cd43-4df2-8257-1354010c1bbe"} 2024-08-05T06:39:43.160Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d"} 2024-08-05T06:39:43.181Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "user": "root"} 2024-08-05T06:39:43.219Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "user": "root"} 2024-08-05T06:39:43.231Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "user": "operator"} 2024-08-05T06:39:43.253Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "user": "operator"} 2024-08-05T06:39:43.265Z 
2024-08-05T06:39:43.265Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "secret": "some-name-mysql-init", "user": "operator"}
2024-08-05T06:39:43.275Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "user": "operator"}
2024-08-05T06:39:43.299Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "user": "operator"}
2024-08-05T06:39:43.310Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "user": "monitor"}
2024-08-05T06:39:43.334Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "user": "monitor"}
2024-08-05T06:39:43.350Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "secret": "some-name-mysql-init", "user": "monitor"}
2024-08-05T06:39:43.383Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "user": "monitor"}
2024-08-05T06:39:43.395Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "user": "monitor"}
2024-08-05T06:39:43.488Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "user": "xtrabackup"}
2024-08-05T06:39:43.512Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "user": "xtrabackup"}
2024-08-05T06:39:43.527Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2024-08-05T06:39:43.541Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "user": "xtrabackup"}
2024-08-05T06:39:43.563Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "user": "xtrabackup"}
2024-08-05T06:39:43.572Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "user": "replication"}
2024-08-05T06:39:43.594Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "user": "replication"}
2024-08-05T06:39:43.605Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "secret": "some-name-mysql-init", "user": "replication"}
2024-08-05T06:39:43.619Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "user": "replication"}
2024-08-05T06:39:43.639Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "user": "replication"}
2024-08-05T06:39:43.639Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "user": "proxyadmin"}
2024-08-05T06:39:43.674Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "user": "proxyadmin"}
2024-08-05T06:39:43.692Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "user": "proxyadmin"}
2024-08-05T06:39:43.692Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "last-applied-secret": "91243e977fae218d1fd2146114e9c9b27fefdb8bf734080454e2c00117626141"}
2024-08-05T06:39:43.692Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "last-applied-secret": "91243e977fae218d1fd2146114e9c9b27fefdb8bf734080454e2c00117626141"}
2024-08-05T06:39:43.724Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "object": "some-name-pxc"}
2024-08-05T06:39:43.763Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "object": "some-name-proxysql"}
2024-08-05T06:39:43.950Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d6dd34c-5068-4e6e-bb16-2e7917fc2a3d", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1240\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-08-05T06:40:41.241Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "2a778aaa-40ee-48da-8e64-7ae17f410c24", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-9717 on 10.47.64.10:53: no such host"}
2024-08-05T06:41:28.214Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "ecb73026-a08f-4048-9280-3fee06001aa1", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-9717 on 10.47.64.10:53: no such host"}
2024-08-05T06:41:33.396Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "cdf3c084-bc55-4918-aac5-a0314aaddda8", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-9717 on 10.47.64.10:53: no such host"}
2024-08-05T06:41:38.522Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "eef616be-833d-4949-8e13-40f34f1e7174", "primary name": "some-name-pxc-0.some-name-pxc.users-9717.svc.cluster.local"}
2024-08-05T06:41:43.653Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "332d0f57-89a0-4420-b500-f77a4fc61697", "primary name": "some-name-pxc-0.some-name-pxc.users-9717.svc.cluster.local"}
2024-08-05T06:41:48.792Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "9b453b87-9e4d-46ab-86ec-e34d30a115e8", "primary name": "some-name-pxc-0.some-name-pxc.users-9717.svc.cluster.local"}
2024-08-05T06:41:53.888Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "06a02595-8463-4f6b-8fa6-c035e9b79538", "primary name": "some-name-pxc-0.some-name-pxc.users-9717.svc.cluster.local"}
2024-08-05T06:41:59.042Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "4aef7a1c-0ed8-490f-bf5e-745af4ab7614", "primary name": "some-name-pxc-0.some-name-pxc.users-9717.svc.cluster.local"}
2024-08-05T06:42:09.505Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "20f9b4c1-2c89-4e74-8fa8-c5185bf82cce", "user": "monitor"}
2024-08-05T06:42:09.778Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "20f9b4c1-2c89-4e74-8fa8-c5185bf82cce", "user": "monitor"}
2024-08-05T06:42:09.803Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "20f9b4c1-2c89-4e74-8fa8-c5185bf82cce", "last-applied-secret": "91243e977fae218d1fd2146114e9c9b27fefdb8bf734080454e2c00117626141"}
2024-08-05T06:42:13.653Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "20f9b4c1-2c89-4e74-8fa8-c5185bf82cce"}
2024-08-05T06:42:17.749Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "05af2a23-6574-4154-9b13-d8001717858e"}
2024-08-05T06:42:23.072Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "17cbc6a3-bd5c-47b6-b94a-1020ea086a92"}
2024-08-05T06:42:25.114Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "44963cee-98b4-46cb-ae45-8607a82c12bc", "user": "operator"}
2024-08-05T06:42:25.138Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "44963cee-98b4-46cb-ae45-8607a82c12bc", "user": "operator"}
2024-08-05T06:42:25.153Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "44963cee-98b4-46cb-ae45-8607a82c12bc", "secret": "some-name-mysql-init", "user": "operator"}
2024-08-05T06:42:25.162Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "44963cee-98b4-46cb-ae45-8607a82c12bc", "user": "operator"}
2024-08-05T06:42:25.185Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "44963cee-98b4-46cb-ae45-8607a82c12bc", "user": "operator"}
2024-08-05T06:42:25.218Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "44963cee-98b4-46cb-ae45-8607a82c12bc", "last-applied-secret": "9ccdd5922e5cc17de97908dfce467a3da781ba75ee01e22c79aafb95fce549a4"}
2024-08-05T06:42:25.310Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "44963cee-98b4-46cb-ae45-8607a82c12bc", "object": "some-name-proxysql"}
2024-08-05T06:42:26.409Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "44963cee-98b4-46cb-ae45-8607a82c12bc", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9717.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9717.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9717.svc.c' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9717.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9717.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9717.svc.c' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1240\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-08-05T06:43:00.975Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "df6ad6d6-bdb3-4c3d-89ab-805a8f936a06"}
2024-08-05T06:43:04.165Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "7777a127-edef-4fdd-b95d-cc13c9033841"}
2024-08-05T06:43:09.926Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "9a43233b-7ffd-47cf-a05a-ac816f3672e8"}
2024-08-05T06:43:15.321Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "f257a27a-c893-48d0-9bc1-07827c7628a7"}
2024-08-05T06:43:20.634Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "0d5b80d4-ee2c-4fcf-925f-7bb6c4f31eec"}
2024-08-05T06:43:25.849Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3e15b8c2-3663-4e79-955a-c51e916d9855"}
2024-08-05T06:43:31.223Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "d8897927-0c57-4e8d-9e9e-80747eca7f89"}
2024-08-05T06:43:38.244Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "ce8eee8a-3d9c-41d2-982d-4dd9ecfc0383"}
2024-08-05T06:43:41.874Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "c4b74b8a-5fd0-43ff-9949-d9bd88982685"}
2024-08-05T06:43:47.120Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "c4bf26d7-afcf-4fd4-83d6-a2dc0809c3f7"}
2024-08-05T06:43:52.347Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "947b7a6e-723f-4b85-b21b-b6ca524af6e9"}
2024-08-05T06:43:57.656Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "eda66667-9b3e-46af-bf71-93c0f41f740e"}
2024-08-05T06:44:02.861Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "34e21fed-4f90-4244-a7a1-d868ea106188"}
2024-08-05T06:44:08.234Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "428b86bd-ede1-4ac0-a4a2-a7205036dbd1"}
2024-08-05T06:44:13.832Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "6fa5900e-ca6d-4707-b1b9-283f6c98e414"}
2024-08-05T06:44:18.920Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "881d11a7-17d4-4336-9c26-c0788426ad45"}
2024-08-05T06:44:20.822Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "1555dcde-1484-4313-8b17-ad46b9c43f42", "user": "root"}
2024-08-05T06:44:20.859Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "1555dcde-1484-4313-8b17-ad46b9c43f42", "user": "root"}
2024-08-05T06:44:20.870Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "1555dcde-1484-4313-8b17-ad46b9c43f42", "secret": "some-name-mysql-init", "user": "root"}
2024-08-05T06:44:25.895Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "1555dcde-1484-4313-8b17-ad46b9c43f42"}
2024-08-05T06:44:25.906Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "1555dcde-1484-4313-8b17-ad46b9c43f42", "user": "root"}
2024-08-05T06:44:25.943Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "1555dcde-1484-4313-8b17-ad46b9c43f42", "user": "root"}
2024-08-05T06:44:25.968Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "1555dcde-1484-4313-8b17-ad46b9c43f42", "user": "monitor"}
2024-08-05T06:44:25.991Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "1555dcde-1484-4313-8b17-ad46b9c43f42", "user": "monitor"}
2024-08-05T06:44:26.000Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "1555dcde-1484-4313-8b17-ad46b9c43f42", "secret": "some-name-mysql-init", "user": "monitor"}
2024-08-05T06:44:26.039Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "1555dcde-1484-4313-8b17-ad46b9c43f42", "user": "monitor"}
2024-08-05T06:44:26.048Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "1555dcde-1484-4313-8b17-ad46b9c43f42", "user": "monitor"}
"1555dcde-1484-4313-8b17-ad46b9c43f42", "user": "xtrabackup"} 2024-08-05T06:44:26.160Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "1555dcde-1484-4313-8b17-ad46b9c43f42", "user": "xtrabackup"} 2024-08-05T06:44:26.170Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "1555dcde-1484-4313-8b17-ad46b9c43f42", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-08-05T06:44:26.179Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "1555dcde-1484-4313-8b17-ad46b9c43f42", "user": "xtrabackup"} 2024-08-05T06:44:26.231Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "1555dcde-1484-4313-8b17-ad46b9c43f42", "user": "xtrabackup"} 2024-08-05T06:44:26.241Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "1555dcde-1484-4313-8b17-ad46b9c43f42", "user": "proxyadmin"} 2024-08-05T06:44:26.279Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "1555dcde-1484-4313-8b17-ad46b9c43f42", "user": "proxyadmin"} 2024-08-05T06:44:26.523Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "1555dcde-1484-4313-8b17-ad46b9c43f42", "user": "proxyadmin"} 2024-08-05T06:44:26.523Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "1555dcde-1484-4313-8b17-ad46b9c43f42", "last-applied-secret": "3ccfb96fd93227c94872c7eb8110f99601ae11463383523279d9a2a71a84c1f4"} 2024-08-05T06:44:26.523Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "1555dcde-1484-4313-8b17-ad46b9c43f42", "last-applied-secret": "3ccfb96fd93227c94872c7eb8110f99601ae11463383523279d9a2a71a84c1f4"} 2024-08-05T06:44:26.617Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "1555dcde-1484-4313-8b17-ad46b9c43f42", "object": "some-name-pxc"} 2024-08-05T06:44:26.747Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "1555dcde-1484-4313-8b17-ad46b9c43f42", "object": "some-name-proxysql"} 2024-08-05T06:44:27.190Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "1555dcde-1484-4313-8b17-ad46b9c43f42", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1240\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-08-05T06:45:14.139Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "36d1d18b-23fe-406a-bfdf-c5953191dd0c", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-9717 on 10.47.64.10:53: no such host"} 2024-08-05T06:45:24.342Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "fe864f3b-4db3-49a7-a557-68b681221739", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-9717 on 10.47.64.10:53: no such host"} 2024-08-05T06:46:21.179Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "c3c102de-bc33-4595-a8b0-17d9979c8bc7", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.179.16.38:33062: connect: connection refused"} 2024-08-05T06:46:26.801Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "2204594f-e542-4cc2-97e8-9f32d038aceb", "primary name": "some-name-pxc-0.some-name-pxc.users-9717.svc.cluster.local"} 2024-08-05T06:46:32.343Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3a0c659f-f3fb-4eaf-bcbb-eb8c2c7525e6", "primary name": "some-name-pxc-0.some-name-pxc.users-9717.svc.cluster.local"} 2024-08-05T06:46:37.450Z INFO Unable to find primary pod for replication. 
2024-08-05T06:46:37.450Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "053ad8e7-b2f9-4fd7-9e0b-acc3022c91de", "primary name": "some-name-pxc-0.some-name-pxc.users-9717.svc.cluster.local"}
2024-08-05T06:46:51.006Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3b5ad681-f34a-4b43-aab1-23217e42bd84", "object": "some-name-pxc"}
2024-08-05T06:46:51.068Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3b5ad681-f34a-4b43-aab1-23217e42bd84", "object": "some-name-haproxy"}
2024-08-05T06:46:51.136Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3b5ad681-f34a-4b43-aab1-23217e42bd84", "object": "some-name-haproxy"}
2024-08-05T06:46:51.234Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3b5ad681-f34a-4b43-aab1-23217e42bd84", "object": "some-name-haproxy"}
2024-08-05T06:46:51.401Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3b5ad681-f34a-4b43-aab1-23217e42bd84", "object": "some-name-haproxy-replicas"}
2024-08-05T06:46:51.494Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: 7fd48c20-3f5a-4a64-bbaa-250df27bbcfe
2024-08-05T06:46:54.843Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3b5ad681-f34a-4b43-aab1-23217e42bd84", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 10.47.70.95:3306: connect: connection refused"}
2024-08-05T06:46:57.979Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "ccc15f27-0c36-445c-a558-5b337bbe16d1", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 10.47.70.95:3306: connect: connection refused"}
2024-08-05T06:47:41.275Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "f95bc4ac-f98e-4e1c-8137-a034aad59d5e", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-9717 on 10.47.64.10:53: no such host"}
2024-08-05T06:47:46.143Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "29cda7e4-2859-442f-a8fa-fd9fa770f829", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-9717 on 10.47.64.10:53: no such host"}
2024-08-05T06:47:46.314Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "d4395174-35cb-4467-9555-5e7d01c00a7a", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-9717 on 10.47.64.10:53: no such host"}
2024-08-05T06:47:51.415Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "4266e74b-4e3e-4ef9-855c-82f37c93e9d4", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.179.17.29:33062: connect: connection refused"}
2024-08-05T06:49:20.073Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "ce4d0180-d7b2-4238-99bd-f9eb3bfed27d", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-9717 on 10.47.64.10:53: no such host"}
2024-08-05T06:49:20.248Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3bb90c63-9080-47fc-8b1b-26c2165b7b0e", "user": "root"}
2024-08-05T06:49:20.282Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3bb90c63-9080-47fc-8b1b-26c2165b7b0e", "user": "root"}
2024-08-05T06:49:20.289Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3bb90c63-9080-47fc-8b1b-26c2165b7b0e", "secret": "some-name-mysql-init", "user": "root"}
2024-08-05T06:49:20.297Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3bb90c63-9080-47fc-8b1b-26c2165b7b0e", "user": "root"}
2024-08-05T06:49:20.338Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3bb90c63-9080-47fc-8b1b-26c2165b7b0e", "user": "root"}
2024-08-05T06:49:20.348Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3bb90c63-9080-47fc-8b1b-26c2165b7b0e", "user": "operator"}
2024-08-05T06:49:20.369Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3bb90c63-9080-47fc-8b1b-26c2165b7b0e", "user": "operator"}
2024-08-05T06:49:20.379Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3bb90c63-9080-47fc-8b1b-26c2165b7b0e", "secret": "some-name-mysql-init", "user": "operator"}
2024-08-05T06:49:20.389Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3bb90c63-9080-47fc-8b1b-26c2165b7b0e", "user": "operator"}
2024-08-05T06:49:20.430Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3bb90c63-9080-47fc-8b1b-26c2165b7b0e", "user": "operator"}
2024-08-05T06:49:20.439Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3bb90c63-9080-47fc-8b1b-26c2165b7b0e", "user": "monitor"}
2024-08-05T06:49:20.462Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3bb90c63-9080-47fc-8b1b-26c2165b7b0e", "user": "monitor"}
2024-08-05T06:49:20.472Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3bb90c63-9080-47fc-8b1b-26c2165b7b0e", "secret": "some-name-mysql-init", "user": "monitor"}
2024-08-05T06:49:20.480Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3bb90c63-9080-47fc-8b1b-26c2165b7b0e", "user": "monitor"}
2024-08-05T06:49:20.575Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3bb90c63-9080-47fc-8b1b-26c2165b7b0e", "user": "xtrabackup"}
2024-08-05T06:49:20.596Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3bb90c63-9080-47fc-8b1b-26c2165b7b0e", "user": "xtrabackup"}
2024-08-05T06:49:20.606Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3bb90c63-9080-47fc-8b1b-26c2165b7b0e", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2024-08-05T06:49:20.613Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3bb90c63-9080-47fc-8b1b-26c2165b7b0e", "user": "xtrabackup"}
2024-08-05T06:49:20.635Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3bb90c63-9080-47fc-8b1b-26c2165b7b0e", "user": "xtrabackup"}
2024-08-05T06:49:20.643Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3bb90c63-9080-47fc-8b1b-26c2165b7b0e", "user": "replication"}
2024-08-05T06:49:20.664Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3bb90c63-9080-47fc-8b1b-26c2165b7b0e", "user": "replication"}
2024-08-05T06:49:20.675Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3bb90c63-9080-47fc-8b1b-26c2165b7b0e", "secret": "some-name-mysql-init", "user": "replication"}
2024-08-05T06:49:20.684Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3bb90c63-9080-47fc-8b1b-26c2165b7b0e", "user": "replication"}
2024-08-05T06:49:20.705Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3bb90c63-9080-47fc-8b1b-26c2165b7b0e", "user": "replication"}
2024-08-05T06:49:20.705Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3bb90c63-9080-47fc-8b1b-26c2165b7b0e", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"}
2024-08-05T06:49:20.725Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3bb90c63-9080-47fc-8b1b-26c2165b7b0e", "object": "some-name-pxc"}
2024-08-05T06:50:06.447Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "9763e38c-cfe7-4b4a-8fa8-461482767634", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-9717 on 10.47.64.10:53: no such host"}
2024-08-05T06:50:11.780Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "fcb68797-01d5-4a73-ae73-8ce32d530819", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-9717 on 10.47.64.10:53: no such host"}
2024-08-05T06:50:53.606Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "080edf15-f89f-48db-a3b3-ba957ce6c66b", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-9717 on 10.47.64.10:53: no such host"}
2024-08-05T06:51:09.790Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "b0b87d5c-6602-4561-8cbd-2dd8c662baeb", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-9717 on 10.47.64.10:53: no such host"}
2024-08-05T06:51:35.797Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3872b0b0-30e2-4e60-ad4f-60ad491973ca", "user": "monitor"}
2024-08-05T06:51:36.062Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "3872b0b0-30e2-4e60-ad4f-60ad491973ca", "user": "monitor"}
2024-08-05T06:51:46.033Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "4a39be96-605b-4c3a-9c0b-29de042ff305", "user": "monitor"}
2024-08-05T06:51:46.056Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "4a39be96-605b-4c3a-9c0b-29de042ff305", "user": "monitor"}
2024-08-05T06:51:46.067Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "4a39be96-605b-4c3a-9c0b-29de042ff305", "secret": "some-name-mysql-init", "user": "monitor"}
2024-08-05T06:51:46.077Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "4a39be96-605b-4c3a-9c0b-29de042ff305", "user": "monitor"}
2024-08-05T06:51:51.373Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "c69ed670-085c-4cf9-aa16-168ab86efdce", "user": "monitor"}
2024-08-05T06:51:56.993Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "dbb7a1f8-091d-459f-9ba3-462a48bdfc9b", "user": "monitor"}
2024-08-05T06:52:02.417Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "fc07f585-f792-42e9-9548-2849c9a728b6", "user": "monitor"}
2024-08-05T06:52:07.801Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-9717", "name": "some-name", "reconcileID": "cd6899eb-1e04-42fb-87d7-4cb8a6b0a0b0", "user": "monitor"}
github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1
	/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1242
[mysql] 2024/08/05 06:48:58 connection.go:49: read tcp 10.179.16.23:47718->10.47.70.95:3306: i/o timeout
[mysql] 2024/08/05 06:49:09 connection.go:49: unexpected EOF
[mysql] 2024/08/05 06:50:59 connection.go:49: unexpected EOF
[mysql] 2024/08/05 06:51:09 connection.go:49: unexpected EOF
+ grep -v NAMESPACE
+ kubectl get pxc --all-namespaces -o wide
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl patch pxc -n users-9717 some-name --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.ELAe9X0XaY
++ mktemp
+ local LAST_ERR=/tmp/tmp.YkfmW4d56n
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.ELAe9X0XaY
perconaxtradbcluster.pxc.percona.com "some-name" deleted
+ cat /tmp/tmp.YkfmW4d56n
+ rm /tmp/tmp.ELAe9X0XaY /tmp/tmp.YkfmW4d56n
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.grK0F7gszP
++ mktemp
+ local LAST_ERR=/tmp/tmp.Zqxr1Be3yt
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.grK0F7gszP
No resources found
+ cat /tmp/tmp.Zqxr1Be3yt
+ rm /tmp/tmp.grK0F7gszP /tmp/tmp.Zqxr1Be3yt
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.tRQU0k91Vl
++ mktemp
+ local LAST_ERR=/tmp/tmp.NjZQm3LJpA
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.tRQU0k91Vl
No resources found
+ cat /tmp/tmp.NjZQm3LJpA
+ rm /tmp/tmp.tRQU0k91Vl /tmp/tmp.NjZQm3LJpA
+ return 0
+ kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
++ mktemp
+ local LAST_OUT=/tmp/tmp.eWiTwfNhyJ
++ mktemp
+ local LAST_ERR=/tmp/tmp.yPXhHRcHIR
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.eWiTwfNhyJ
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted
+ cat /tmp/tmp.yPXhHRcHIR
+ rm /tmp/tmp.eWiTwfNhyJ /tmp/tmp.yPXhHRcHIR
+ return 0
+ kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml
+ :
+ '[' '!' -z '' ']'
+ '[' -n pxc-operator ']'
+ rm -rf /tmp/tmp.jmGBhW8nFf
+ kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator
+ kubectl_bin delete --grace-period=0 --force=true namespace users-9717
++ mktemp
++ mktemp
+ desc 'test passed'
+ set +o xtrace
-----------------------------------------------------------------------------------
test passed+ local LAST_OUT=/tmp/tmp.q2SykqZdL9
-----------------------------------------------------------------------------------
+ local LAST_OUT=/tmp/tmp.mu66RuEAHS
++ mktemp
++ mktemp
+ local LAST_ERR=/tmp/tmp.8aNYGWJxzj
+ local LAST_ERR=/tmp/tmp.EiDL28GgEe
+ local exit_status=0
+ local exit_status=0
++ seq 0 2
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace pxc-operator
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace users-9717