Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/logs/users-5-7.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + create_infra users-28617 + local ns=users-28617 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE + kubectl patch pxc -n users-19707 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.rL4PICx3JO ++ mktemp + local LAST_ERR=/tmp/tmp.s7fO09u2J2 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rL4PICx3JO perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.s7fO09u2J2 + rm /tmp/tmp.rL4PICx3JO /tmp/tmp.s7fO09u2J2 + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.09q6opBth0 ++ mktemp + local LAST_ERR=/tmp/tmp.lUK532bX2b + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.09q6opBth0 No resources found + cat /tmp/tmp.lUK532bX2b + rm /tmp/tmp.09q6opBth0 /tmp/tmp.lUK532bX2b + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.uiLpXzqi85 ++ mktemp + local LAST_ERR=/tmp/tmp.mIFDPsQW01 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.uiLpXzqi85 No resources found + cat /tmp/tmp.mIFDPsQW01 + rm /tmp/tmp.uiLpXzqi85 /tmp/tmp.mIFDPsQW01 + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ sed s/NAMESPACE// ++ awk '-F ' '{print $2}' + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified 
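-----------------------------------------------------------------------------------
note: the kubectl_bin retry wrapper (reconstructed)
-----------------------------------------------------------------------------------
Nearly every kubectl call in this trace runs through a kubectl_bin wrapper that captures stdout/stderr into mktemp files and retries up to three times. A minimal sketch consistent with the trace above; the output redirections and the stderr replay are assumptions, and the trace only ever shows 'sleep 0' between attempts:

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do                       # up to three attempts
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"   # assumed: both streams go to the temp files
        exit_status=$?
        set -e
        if [ $exit_status != 0 ]; then
            sleep 0                               # the trace shows 'sleep 0' after a failed attempt
        else
            break
        fi
    done
    cat "$LAST_OUT"                               # replay captured output into the log
    cat "$LAST_ERR" >&2                           # assumed: captured errors replayed on stderr
    rm "$LAST_OUT" "$LAST_ERR"
    return $exit_status
}

The repeated "error: resource(s) were provided, but no name was specified" lines above are expected: destroy_chaos_mesh pipes an empty grep result into 'timeout 30 kubectl delete ...', and each failure is swallowed by the ':' no-op that follows.
-----------------------------------------------------------------------------------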
+ : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator + kubectl_bin get ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.98zIeBhySm ++ mktemp + local LAST_OUT=/tmp/tmp.LgyFTRmfhe ++ mktemp + local LAST_ERR=/tmp/tmp.11XKKMFiKs + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.lyucjFhmak + local exit_status=0 + for i in '$(seq 0 2)' ++ seq 0 2 + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.98zIeBhySm + cat /tmp/tmp.11XKKMFiKs + rm /tmp/tmp.98zIeBhySm /tmp/tmp.11XKKMFiKs + return 0 namespace "users-19707" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LgyFTRmfhe namespace "pxc-operator" deleted + cat /tmp/tmp.lyucjFhmak + rm /tmp/tmp.LgyFTRmfhe /tmp/tmp.lyucjFhmak + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.sV52gk6dGe ++ mktemp + local LAST_ERR=/tmp/tmp.Sz9lp65ZXG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.sV52gk6dGe namespace/pxc-operator created + cat /tmp/tmp.Sz9lp65ZXG + rm /tmp/tmp.sV52gk6dGe /tmp/tmp.Sz9lp65ZXG + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.cd6mdiHAvn +++ mktemp ++ local LAST_ERR=/tmp/tmp.aOvM8NdH3n ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cd6mdiHAvn ++ cat /tmp/tmp.aOvM8NdH3n ++ rm /tmp/tmp.cd6mdiHAvn /tmp/tmp.aOvM8NdH3n ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1718-4c5c3df7-4-cluster2 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.2p16xsqmzm ++ mktemp + local LAST_ERR=/tmp/tmp.UuF4CGd8rg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1718-4c5c3df7-4-cluster2 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2p16xsqmzm Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1718-4c5c3df7-4-cluster2" modified. 
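-----------------------------------------------------------------------------------
note: create_namespace and wait_for_delete (reconstructed)
-----------------------------------------------------------------------------------
The namespace churn above comes from a create_namespace helper: tear down chaos-mesh leftovers, delete every non-system namespace, recreate the target namespace, and repoint the kubeconfig context at it. A sketch consistent with the trace; the argument handling and the wait_for_delete loop body (hidden by 'set +o xtrace') are assumptions:

create_namespace() {
    local namespace=$1
    local skip_clean_namespace=${2:-}

    destroy_chaos_mesh

    desc 'cleaned up all old namespaces'
    kubectl_bin get ns \
        | egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' \
        | awk '{print $1}' \
        | xargs kubectl delete ns || :            # "default" may not be deleted; the failure is tolerated

    desc "cleaned up old namespaces $namespace"
    kubectl_bin delete namespace "$namespace" || :
    wait_for_delete "namespace/$namespace"

    desc "create namespace $namespace"
    kubectl_bin create namespace "$namespace"
    kubectl_bin config set-context "$(kubectl_bin config current-context)" --namespace="$namespace"
}

wait_for_delete() {
    local res=$1
    echo -n "$res - "
    set +o xtrace
    # assumed shape of the hidden loop: poll until the API server answers NotFound
    # (the NotFound error visible in the log is this exit condition firing)
    while kubectl get "$res" >/dev/null; do
        sleep 1
    done
    set -o xtrace
}
-----------------------------------------------------------------------------------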
+ cat /tmp/tmp.UuF4CGd8rg + rm /tmp/tmp.2p16xsqmzm /tmp/tmp.UuF4CGd8rg + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.plm1yJtkiH ++ mktemp + local LAST_ERR=/tmp/tmp.lxz48PnyMK + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.plm1yJtkiH customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.lxz48PnyMK + rm /tmp/tmp.plm1yJtkiH /tmp/tmp.lxz48PnyMK + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.gBiKjnP3gk ++ mktemp + local LAST_ERR=/tmp/tmp.W8eWjMUHCY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.gBiKjnP3gk clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.W8eWjMUHCY + rm /tmp/tmp.gBiKjnP3gk /tmp/tmp.W8eWjMUHCY + return 0 + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1718-4c5c3df7^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/deploy/cw-operator.yaml + kubectl_bin apply -f - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - ++ mktemp + local LAST_OUT=/tmp/tmp.Z3r0wtfnG6 ++ mktemp + local LAST_ERR=/tmp/tmp.mGuIque0V8 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Z3r0wtfnG6 deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.mGuIque0V8 + rm /tmp/tmp.Z3r0wtfnG6 /tmp/tmp.mGuIque0V8 + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.kqE51Ed37u ++ mktemp + local LAST_ERR=/tmp/tmp.K19humv4Q2 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.kqE51Ed37u pod/percona-xtradb-cluster-operator-68fd4c6d88-rtnq9 condition met + cat /tmp/tmp.K19humv4Q2 + rm /tmp/tmp.kqE51Ed37u /tmp/tmp.K19humv4Q2 + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.2BQIxeycbQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.nOECLSWu2w ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2BQIxeycbQ ++ cat /tmp/tmp.nOECLSWu2w ++ rm /tmp/tmp.2BQIxeycbQ /tmp/tmp.nOECLSWu2w ++ return 0 + wait_pod percona-xtradb-cluster-operator-68fd4c6d88-rtnq9 480 pxc-operator + local pod=percona-xtradb-cluster-operator-68fd4c6d88-rtnq9 + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-68fd4c6d88-rtnq9 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-68fd4c6d88-rtnq9 condition met percona-xtradb-cluster-operator-68fd4c6d88-rtnq9.Ok + sleep 3 + create_namespace users-28617 + local namespace=users-28617 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + awk '{print$1}' + '[' -n '' ']' + desc 
'cleaned up old namespaces users-28617' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-28617 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-28617 + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.YfOHbuTqlL + local LAST_OUT=/tmp/tmp.aWO35hOVuI ++ mktemp + local LAST_ERR=/tmp/tmp.h1bFGAjDZQ + local exit_status=0 ++ mktemp ++ seq 0 2 + local LAST_ERR=/tmp/tmp.a0gcOkzZQe + local exit_status=0 + for i in '$(seq 0 2)' + set +e + kubectl get ns ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-28617 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-28617 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.aWO35hOVuI + cat /tmp/tmp.h1bFGAjDZQ + rm /tmp/tmp.aWO35hOVuI /tmp/tmp.h1bFGAjDZQ + return 0 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-28617 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.YfOHbuTqlL + cat /tmp/tmp.a0gcOkzZQe Error from server (NotFound): namespaces "users-28617" not found + rm /tmp/tmp.YfOHbuTqlL /tmp/tmp.a0gcOkzZQe + return 1 + : + wait_for_delete namespace/users-28617 + local res=namespace/users-28617 + echo -n 'namespace/users-28617 - ' namespace/users-28617 - + set +o xtrace Error from server (NotFound): namespaces "users-28617" not found + desc 'create namespace users-28617' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-28617 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-28617 ++ mktemp + local LAST_OUT=/tmp/tmp.aeF7OUBjbJ ++ mktemp + local LAST_ERR=/tmp/tmp.FiVcC9EhCW + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-28617 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.aeF7OUBjbJ namespace/users-28617 created + cat /tmp/tmp.FiVcC9EhCW + rm /tmp/tmp.aeF7OUBjbJ /tmp/tmp.FiVcC9EhCW + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.NKOpaj5K04 +++ mktemp ++ local LAST_ERR=/tmp/tmp.I4wtrnqkws ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NKOpaj5K04 ++ cat /tmp/tmp.I4wtrnqkws ++ rm /tmp/tmp.NKOpaj5K04 /tmp/tmp.I4wtrnqkws ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1718-4c5c3df7-4-cluster2 --namespace=users-28617 ++ mktemp + local LAST_OUT=/tmp/tmp.TKxiAPt9Bd ++ mktemp + local LAST_ERR=/tmp/tmp.FQdMcpXeKn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1718-4c5c3df7-4-cluster2 --namespace=users-28617 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.TKxiAPt9Bd Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1718-4c5c3df7-4-cluster2" modified. 
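-----------------------------------------------------------------------------------
note: deploy_operator (reconstructed)
-----------------------------------------------------------------------------------
The deploy_operator step traced earlier (between the two create_namespace calls) is three applies plus a readiness wait: CRDs, cluster-wide RBAC, then the operator Deployment itself. A sketch consistent with the trace; $src_dir and $OPERATOR_IMAGE are hypothetical stand-ins for the literal /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718 path and the perconalab/percona-xtradb-cluster-operator:PR-1718-4c5c3df7 tag:

deploy_operator() {
    desc 'start PXC operator'

    # CRDs go in with server-side apply so repeated runs don't fight over field ownership
    kubectl_bin apply --server-side --force-conflicts -f "$src_dir/deploy/crd.yaml"

    # cluster-wide RBAC, re-pointed at the operator namespace
    sed -e 's^namespace: .*^namespace: pxc-operator^' "$src_dir/deploy/cw-rbac.yaml" \
        | kubectl_bin apply -f -

    # operator Deployment: pin the PR image, relax the probe threshold,
    # force DEBUG logging, and disable telemetry before applying
    cat "$src_dir/deploy/cw-operator.yaml" \
        | sed -e "s^image: .*^image: $OPERATOR_IMAGE^" \
        | sed -e 's^failureThreshold: .*^failureThreshold: 10^' \
        | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - \
        | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - \
        | kubectl_bin apply -f -

    sleep 10
    kubectl_bin wait --for=condition=Ready pods \
        -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator \
        --timeout=30s
    wait_pod "$(get_operator_pod)" 480 pxc-operator
    sleep 3
}
-----------------------------------------------------------------------------------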
+ cat /tmp/tmp.FQdMcpXeKn + rm /tmp/tmp.TKxiAPt9Bd /tmp/tmp.FQdMcpXeKn + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.LrQiIxGae1 ++ mktemp + local LAST_ERR=/tmp/tmp.ixopuwk3KA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LrQiIxGae1 secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.ixopuwk3KA + rm /tmp/tmp.LrQiIxGae1 /tmp/tmp.ixopuwk3KA + return 0 + desc 'create PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.VfJYObNqa0 ++ mktemp + local LAST_ERR=/tmp/tmp.NFLj3WYbEI + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.VfJYObNqa0 secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.NFLj3WYbEI + rm /tmp/tmp.VfJYObNqa0 /tmp/tmp.NFLj3WYbEI + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_OUT=/tmp/tmp.zpt22SecXu + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1718-4c5c3df7#' + /usr/bin/sed -e 
's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' ++ mktemp + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + local LAST_ERR=/tmp/tmp.vbVrHHMlGY + local exit_status=0 + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-28617~ + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zpt22SecXu deployment.apps/pxc-client created + cat /tmp/tmp.vbVrHHMlGY + rm /tmp/tmp.zpt22SecXu /tmp/tmp.vbVrHHMlGY + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_OUT=/tmp/tmp.lxthHGDUru + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' ++ mktemp + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-28617~ + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1718-4c5c3df7#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + local LAST_ERR=/tmp/tmp.YdzKvCbSUZ + local exit_status=0 ++ seq 0 2 + /usr/bin/sed -e 's#apply:.*#apply: Never#' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lxthHGDUru perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.YdzKvCbSUZ + rm /tmp/tmp.lxthHGDUru /tmp/tmp.YdzKvCbSUZ + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.G3VcdAfIeH ++++ mktemp +++ local LAST_ERR=/tmp/tmp.gG9X8zJhfE +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' 
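-----------------------------------------------------------------------------------
note: apply_config / cat_config (reconstructed)
-----------------------------------------------------------------------------------
Both the pxc-client Deployment and the some-name cluster CR above are piped through the same image-rewriting filter before 'kubectl apply', which is what the fan of parallel sed processes in the trace is doing. A sketch of that pipeline; the $IMAGE_* variables are hypothetical stand-ins for the literal perconalab tags substituted in the trace (main-pxc5.7, PR-1718-4c5c3df7, main-proxysql, and so on):

apply_config() {
    cat_config "$1" | kubectl_bin apply -f -
}

cat_config() {
    cat "$1" \
        | /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
        | /usr/bin/sed -e "s#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}\$#image: $IMAGE_PXC#" \
        | /usr/bin/sed -e "s#image:.*\/percona-xtradb-cluster:.*\$#image: $IMAGE_PXC#" \
        | /usr/bin/sed -e "s#image:.*-init\$#image: $IMAGE_OPERATOR#" \
        | /usr/bin/sed -e "s#image:.*-pmm\$#image: $IMAGE_PMM#" \
        | /usr/bin/sed -e "s#image:.*-backup\$#image: $IMAGE_BACKUP#" \
        | /usr/bin/sed -e "s#image:.*-proxysql\$#image: $IMAGE_PROXY#" \
        | /usr/bin/sed -e "s#image:.*-haproxy\$#image: $IMAGE_HAPROXY#" \
        | /usr/bin/sed -e "s#image:.*-logcollector\$#image: $IMAGE_LOGCOLLECTOR#" \
        | /usr/bin/sed -e "s~minio-service.#namespace~minio-service.$namespace~" \
        | /usr/bin/sed -e 's#apply:.*#apply: Never#'
}
-----------------------------------------------------------------------------------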
+++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.G3VcdAfIeH +++ cat /tmp/tmp.gG9X8zJhfE +++ rm /tmp/tmp.G3VcdAfIeH /tmp/tmp.gG9X8zJhfE +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.l4X0bYgLSH ++++ mktemp +++ local LAST_ERR=/tmp/tmp.JoYlQ0EZB6 +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.l4X0bYgLSH +++ cat /tmp/tmp.JoYlQ0EZB6 +++ rm /tmp/tmp.l4X0bYgLSH /tmp/tmp.JoYlQ0EZB6 +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-28617 ++ mktemp + local LAST_OUT=/tmp/tmp.bHJN5OuKJd ++ mktemp + local LAST_ERR=/tmp/tmp.MQQBUd2fLH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-28617 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-28617 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-28617 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.bHJN5OuKJd + cat /tmp/tmp.MQQBUd2fLH error: no matching resources found + rm /tmp/tmp.bHJN5OuKJd /tmp/tmp.MQQBUd2fLH + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met some-name-pxc-0.Ok + for i 
in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qA1D4aeTXq +++ mktemp ++ local LAST_ERR=/tmp/tmp.tZ2RNAsZSE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qA1D4aeTXq ++ cat /tmp/tmp.tZ2RNAsZSE Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.qA1D4aeTXq /tmp/tmp.tZ2RNAsZSE ++ return 0 + [[ -n '' ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wF3xbruyHM +++ mktemp ++ local LAST_ERR=/tmp/tmp.XXYXI24ONT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wF3xbruyHM ++ cat /tmp/tmp.XXYXI24ONT ++ rm /tmp/tmp.wF3xbruyHM /tmp/tmp.XXYXI24ONT ++ return 0 + client_pod=pxc-client-64b479df95-8brh6 + wait_pod pxc-client-64b479df95-8brh6 + local pod=pxc-client-64b479df95-8brh6 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-8brh6 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-8brh6 condition met pxc-client-64b479df95-8brh6.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.x2uibJ6nRf +++ mktemp ++ local LAST_ERR=/tmp/tmp.C0y0NUTe9f ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' 
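-----------------------------------------------------------------------------------
note: wait_pod, run_mysql and compare_mysql_cmd (reconstructed)
-----------------------------------------------------------------------------------
Every SQL statement in this test is routed through the shared pxc-client Deployment created above, and every result check is a diff against a canned .sql file. Sketches of the helpers involved here and in the comparisons that follow; the kubectl exec invocation, the mysql flags, and the empty-result retry are assumptions ('set +o xtrace' hides those bodies), and $test_dir/$tmp_dir stand in for the literal e2e-tests/users and /tmp/tmp.oaVtWV5LhP paths:

get_client_pod() {
    kubectl_bin get pods --selector=name=pxc-client \
        -o 'jsonpath={.items[].metadata.name}'
}

wait_pod() {
    local pod=$1
    local max_retry=${2:-480}
    local ns=$3
    # pods named *-pxc-N / *-proxysql-N have a matching container to report on
    # (the trace computes this; how it is used is hidden by the xtrace toggle)
    local container
    container=$(echo "$pod" | /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' | egrep '^(pxc|proxysql)$' || :)
    set +o xtrace
    kubectl wait --for=condition=Ready "pod/$pod" ${ns:+-n "$ns"} --timeout="${max_retry}s"
    echo "$pod.Ok"                                # matches the "<pod>.Ok" markers in the log
    set -o xtrace
}

run_mysql() {
    local command=$1
    local uri=$2
    local client_pod
    client_pod=$(get_client_pod)
    wait_pod "$client_pod"
    set +o xtrace
    # assumed: exec the mysql client inside the client pod, silent, no column names
    kubectl exec "$client_pod" -- bash -c "printf '%s\n' \"$command\" | mysql -sN $uri"
    set -o xtrace
}

compare_mysql_cmd() {
    local command_id=$1 command=$2 uri=$3 postfix=${4:-}
    local expected_result=$test_dir/compare/${command_id}${postfix}.sql
    run_mysql "$command" "$uri" >"$tmp_dir/${command_id}.sql"
    if [ ! -s "$tmp_dir/${command_id}.sql" ]; then
        sleep 20                                  # assumed: one re-read if the first result came back empty
        run_mysql "$command" "$uri" >"$tmp_dir/${command_id}.sql"
    fi
    diff -u "$expected_result" "$tmp_dir/${command_id}.sql"   # any drift fails the test
}

The compare_mysql_cmd_local / run_mysql_local variants used later for the proxyadmin and xtrabackup checks follow the same pattern, but exec straight into the target pod's named container (for example ProxySQL's admin interface on 127.0.0.1:6032) instead of going through the client Deployment.
-----------------------------------------------------------------------------------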
++ break ++ cat /tmp/tmp.x2uibJ6nRf ++ cat /tmp/tmp.C0y0NUTe9f ++ rm /tmp/tmp.x2uibJ6nRf /tmp/tmp.C0y0NUTe9f ++ return 0 + client_pod=pxc-client-64b479df95-8brh6 + wait_pod pxc-client-64b479df95-8brh6 + local pod=pxc-client-64b479df95-8brh6 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-8brh6 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-8brh6 condition met pxc-client-64b479df95-8brh6.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5EqcTWljfD +++ mktemp ++ local LAST_ERR=/tmp/tmp.L3yWoUdgxd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5EqcTWljfD ++ cat /tmp/tmp.L3yWoUdgxd ++ rm /tmp/tmp.5EqcTWljfD /tmp/tmp.L3yWoUdgxd ++ return 0 + client_pod=pxc-client-64b479df95-8brh6 + wait_pod pxc-client-64b479df95-8brh6 + local pod=pxc-client-64b479df95-8brh6 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-8brh6 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-8brh6 condition met pxc-client-64b479df95-8brh6.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.oaVtWV5LhP/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-1.sql /tmp/tmp.oaVtWV5LhP/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WGyrzBm4Aa +++ mktemp ++ local LAST_ERR=/tmp/tmp.b2g5kdqo6J ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WGyrzBm4Aa ++ cat /tmp/tmp.b2g5kdqo6J ++ rm /tmp/tmp.WGyrzBm4Aa /tmp/tmp.b2g5kdqo6J ++ return 0 + client_pod=pxc-client-64b479df95-8brh6 + wait_pod pxc-client-64b479df95-8brh6 + local pod=pxc-client-64b479df95-8brh6 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-8brh6 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-8brh6 condition met pxc-client-64b479df95-8brh6.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.oaVtWV5LhP/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-1.sql /tmp/tmp.oaVtWV5LhP/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nxXS9Ok9c7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ia8jpSpjTZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nxXS9Ok9c7 ++ cat /tmp/tmp.ia8jpSpjTZ ++ rm /tmp/tmp.nxXS9Ok9c7 /tmp/tmp.ia8jpSpjTZ ++ return 0 + client_pod=pxc-client-64b479df95-8brh6 + wait_pod pxc-client-64b479df95-8brh6 + local pod=pxc-client-64b479df95-8brh6 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-8brh6 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-8brh6 condition met pxc-client-64b479df95-8brh6.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.oaVtWV5LhP/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-1.sql /tmp/tmp.oaVtWV5LhP/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QBARgjhlZI +++ mktemp ++ local LAST_ERR=/tmp/tmp.J34nNoxs5l ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QBARgjhlZI ++ cat /tmp/tmp.J34nNoxs5l Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.QBARgjhlZI /tmp/tmp.J34nNoxs5l ++ return 0 + '[' '' ']' + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.HydqZXuvJz ++ mktemp + local LAST_ERR=/tmp/tmp.6pvAxmQ0zo + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HydqZXuvJz secret/my-cluster-secrets patched + cat /tmp/tmp.6pvAxmQ0zo + rm /tmp/tmp.HydqZXuvJz /tmp/tmp.6pvAxmQ0zo + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Xk1n1zwA0F +++ mktemp ++ local LAST_ERR=/tmp/tmp.Tv6Dcd4myo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Xk1n1zwA0F ++ cat /tmp/tmp.Tv6Dcd4myo ++ rm /tmp/tmp.Xk1n1zwA0F /tmp/tmp.Tv6Dcd4myo ++ return 0 + client_pod=pxc-client-64b479df95-8brh6 + wait_pod pxc-client-64b479df95-8brh6 + local pod=pxc-client-64b479df95-8brh6 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-8brh6 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-8brh6 condition met pxc-client-64b479df95-8brh6.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.oaVtWV5LhP/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-4.sql /tmp/tmp.oaVtWV5LhP/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.pSck7FluN2 ++ mktemp + local LAST_ERR=/tmp/tmp.SSxYjKG3xp + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.pSck7FluN2 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.SSxYjKG3xp + rm /tmp/tmp.pSck7FluN2 /tmp/tmp.SSxYjKG3xp + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GX4lmkyMtv +++ mktemp ++ local LAST_ERR=/tmp/tmp.EBrRAjjv4t ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GX4lmkyMtv ++ cat /tmp/tmp.EBrRAjjv4t ++ rm /tmp/tmp.GX4lmkyMtv /tmp/tmp.EBrRAjjv4t ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TIRWOhgeTU +++ mktemp ++ local LAST_ERR=/tmp/tmp.KVLEot3czU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TIRWOhgeTU ++ cat /tmp/tmp.KVLEot3czU ++ rm /tmp/tmp.TIRWOhgeTU /tmp/tmp.KVLEot3czU ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.lL1Pki362D ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.BqGf13eNyu +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.lL1Pki362D +++++ cat /tmp/tmp.BqGf13eNyu +++++ rm /tmp/tmp.lL1Pki362D /tmp/tmp.BqGf13eNyu +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.IZoaCRrXoA ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.n1JkNLlZvi +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.IZoaCRrXoA +++++ cat /tmp/tmp.n1JkNLlZvi +++++ rm /tmp/tmp.IZoaCRrXoA /tmp/tmp.n1JkNLlZvi +++++ return 0 ++++ [[ true == 
\t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tLhi43AYX5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.gJgZBb6KRh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tLhi43AYX5 ++ cat /tmp/tmp.gJgZBb6KRh ++ rm /tmp/tmp.tLhi43AYX5 /tmp/tmp.gJgZBb6KRh ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ZEEm8Kp9vR ++ mktemp + local LAST_ERR=/tmp/tmp.KYyKMoW1kA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ZEEm8Kp9vR secret/my-cluster-secrets patched + cat /tmp/tmp.KYyKMoW1kA + rm /tmp/tmp.ZEEm8Kp9vR /tmp/tmp.KYyKMoW1kA + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.irwynoTzrf +++ mktemp ++ local LAST_ERR=/tmp/tmp.6hFJliFm0u ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.irwynoTzrf ++ cat /tmp/tmp.6hFJliFm0u ++ rm /tmp/tmp.irwynoTzrf /tmp/tmp.6hFJliFm0u ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.69J9cqDmm4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.jugM0gq1Cc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.69J9cqDmm4 ++ cat /tmp/tmp.jugM0gq1Cc ++ rm /tmp/tmp.69J9cqDmm4 /tmp/tmp.jugM0gq1Cc ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2wbMUJ8C7G +++ mktemp ++ local LAST_ERR=/tmp/tmp.zvlux6CojA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2wbMUJ8C7G ++ cat /tmp/tmp.zvlux6CojA ++ rm /tmp/tmp.2wbMUJ8C7G /tmp/tmp.zvlux6CojA ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local 
LAST_OUT=/tmp/tmp.MlgJ18QZEx ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.H7rDiHhhHn +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.MlgJ18QZEx +++++ cat /tmp/tmp.H7rDiHhhHn +++++ rm /tmp/tmp.MlgJ18QZEx /tmp/tmp.H7rDiHhhHn +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ifQSkYXric ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.lmev7TgjVP +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ifQSkYXric +++++ cat /tmp/tmp.lmev7TgjVP +++++ rm /tmp/tmp.ifQSkYXric /tmp/tmp.lmev7TgjVP +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4BO9ngf9cn +++ mktemp ++ local LAST_ERR=/tmp/tmp.cO8TN74p3O ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4BO9ngf9cn ++ cat /tmp/tmp.cO8TN74p3O ++ rm /tmp/tmp.4BO9ngf9cn /tmp/tmp.cO8TN74p3O ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.oaVtWV5LhP/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-2.sql /tmp/tmp.oaVtWV5LhP/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.oaVtWV5LhP/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-2.sql /tmp/tmp.oaVtWV5LhP/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.oaVtWV5LhP/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-2.sql /tmp/tmp.oaVtWV5LhP/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.RxNPRNsCbS ++ mktemp + local LAST_ERR=/tmp/tmp.utBS10t6Zq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RxNPRNsCbS perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.utBS10t6Zq + rm /tmp/tmp.RxNPRNsCbS /tmp/tmp.utBS10t6Zq + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.KRX6fzkSgE ++ mktemp + local LAST_ERR=/tmp/tmp.XaGhuP8kfR + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.KRX6fzkSgE secret/my-cluster-secrets patched + cat /tmp/tmp.XaGhuP8kfR + rm /tmp/tmp.KRX6fzkSgE /tmp/tmp.XaGhuP8kfR + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Mc7k7X9lQJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.aO5MOSfl1h ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Mc7k7X9lQJ ++ cat /tmp/tmp.aO5MOSfl1h ++ rm /tmp/tmp.Mc7k7X9lQJ /tmp/tmp.aO5MOSfl1h ++ return 0 + [[ 
initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VtaOmUueOs +++ mktemp ++ local LAST_ERR=/tmp/tmp.zTLBU2meVy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VtaOmUueOs ++ cat /tmp/tmp.zTLBU2meVy ++ rm /tmp/tmp.VtaOmUueOs /tmp/tmp.zTLBU2meVy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EjZimtm572 +++ mktemp ++ local LAST_ERR=/tmp/tmp.MngIUymMIN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EjZimtm572 ++ cat /tmp/tmp.MngIUymMIN ++ rm /tmp/tmp.EjZimtm572 /tmp/tmp.MngIUymMIN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sZPFlpfmqz +++ mktemp ++ local LAST_ERR=/tmp/tmp.SsGWoOqkWf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sZPFlpfmqz ++ cat /tmp/tmp.SsGWoOqkWf ++ rm /tmp/tmp.sZPFlpfmqz /tmp/tmp.SsGWoOqkWf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pPLDpKnHNi +++ mktemp ++ local LAST_ERR=/tmp/tmp.5p1Mf6cY7R ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pPLDpKnHNi ++ cat /tmp/tmp.5p1Mf6cY7R ++ rm /tmp/tmp.pPLDpKnHNi /tmp/tmp.5p1Mf6cY7R ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0lN4m6dXvf +++ mktemp ++ local LAST_ERR=/tmp/tmp.Lt0PO8hbXj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0lN4m6dXvf ++ cat /tmp/tmp.Lt0PO8hbXj ++ rm /tmp/tmp.0lN4m6dXvf /tmp/tmp.Lt0PO8hbXj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NSQVW2ekMH +++ mktemp ++ local LAST_ERR=/tmp/tmp.6zbZH1gWwi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NSQVW2ekMH ++ cat /tmp/tmp.6zbZH1gWwi ++ rm /tmp/tmp.NSQVW2ekMH /tmp/tmp.6zbZH1gWwi ++ return 0
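-----------------------------------------------------------------------------------
note: wait_cluster_consistency (reconstructed)
-----------------------------------------------------------------------------------
The 'waiting for cluster readyness' stretch above is wait_cluster_consistency polling the CR after the proxysql resize and the xtrabackup password change: first .status.state must return to "ready", then the pxc and proxy ready-counters must match the requested sizes. A simplified sketch; the trace shows the bound check against 36 but never the give-up branch, so that part is an assumption:

wait_cluster_consistency() {
    local cluster_name=$1
    local cluster_size=$2
    local proxy_size=${3:-$cluster_size}
    desc 'wait cluster consistency'
    local i=0 max=36
    sleep 7
    while [[ $(kubectl_bin get pxc "$cluster_name" -o 'jsonpath={.status.state}') != "ready" ]]; do
        echo 'waiting for cluster readyness'
        sleep 20
        [[ $i -ge $max ]] && return 1             # assumed: give up after 36 polls (~12 minutes)
        let i+=1
    done
    # with the state ready, both ready-replica counters must match what was requested
    [[ $(kubectl_bin get pxc "$cluster_name" -o 'jsonpath={.status.pxc.ready}') == "$cluster_size" ]]
    [[ $(kubectl_bin get pxc "$cluster_name" -o "jsonpath={.status.$(get_proxy_engine "$cluster_name").ready}") == "$proxy_size" ]]
}

get_proxy_engine echoes "proxysql" or "haproxy" depending on which .spec section has enabled: true, which is why the polling here reads .status.proxysql.ready for this cluster.
-----------------------------------------------------------------------------------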
/tmp/tmp.NSQVW2ekMH /tmp/tmp.6zbZH1gWwi ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.smi1Pv1rMW +++ mktemp ++ local LAST_ERR=/tmp/tmp.Q82WI9a2fN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.smi1Pv1rMW ++ cat /tmp/tmp.Q82WI9a2fN ++ rm /tmp/tmp.smi1Pv1rMW /tmp/tmp.Q82WI9a2fN ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DtF2OtZHvv +++ mktemp ++ local LAST_ERR=/tmp/tmp.wX5dc2EoXm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DtF2OtZHvv ++ cat /tmp/tmp.wX5dc2EoXm ++ rm /tmp/tmp.DtF2OtZHvv /tmp/tmp.wX5dc2EoXm ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Q7sIILgWnS ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.8nn049UsjO +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Q7sIILgWnS +++++ cat /tmp/tmp.8nn049UsjO +++++ rm /tmp/tmp.Q7sIILgWnS /tmp/tmp.8nn049UsjO +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.mB4oaZHlD9 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.RwP3GB6Pbr +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.mB4oaZHlD9 +++++ cat /tmp/tmp.RwP3GB6Pbr +++++ rm /tmp/tmp.mB4oaZHlD9 /tmp/tmp.RwP3GB6Pbr +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.F0YPeNnotd +++ mktemp ++ local LAST_ERR=/tmp/tmp.OpqIRTjWCi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.F0YPeNnotd ++ cat /tmp/tmp.OpqIRTjWCi ++ rm /tmp/tmp.F0YPeNnotd /tmp/tmp.OpqIRTjWCi ++ return 0 + [[ 2 == \2 ]] + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 
-uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' -s /tmp/tmp.oaVtWV5LhP/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-3.sql /tmp/tmp.oaVtWV5LhP/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.rEQTvV4O0G ++ mktemp + local LAST_ERR=/tmp/tmp.PGeCmozoa0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rEQTvV4O0G secret/my-cluster-secrets patched + cat /tmp/tmp.PGeCmozoa0 + rm /tmp/tmp.rEQTvV4O0G /tmp/tmp.PGeCmozoa0 + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.1Tqde8VvRM +++ mktemp ++ local LAST_ERR=/tmp/tmp.CzKs9PHcJA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1Tqde8VvRM ++ cat /tmp/tmp.CzKs9PHcJA ++ rm /tmp/tmp.1Tqde8VvRM /tmp/tmp.CzKs9PHcJA ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + echo 'Skipping dual password feature doesn'\''t work for 5.7. PXC 5.7 doesn'\''t support it!' Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it! 
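The wait_cluster_consistency helper that runs next (and recurs throughout this log) polls the custom resource status until the operator reports the cluster ready. A minimal sketch reconstructed from the trace, assuming a timeout branch that is never reached in this run; the jsonpath queries, counter bounds, and sleep values are taken verbatim from the log:

    wait_cluster_consistency() {
        local cluster_name="$1"   # some-name
        local cluster_size="$2"   # expected ready PXC pods (3 here)
        local proxy_size="$3"     # expected ready proxy pods
        local i=0
        local max=36
        sleep 7
        # poll .status.state until the operator flips it from 'initializing' to 'ready'
        until [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.state}') == "ready" ]]; do
            echo 'waiting for cluster readyness'
            sleep 20
            [[ $i -ge $max ]] && return 1   # assumption: give up after 36 polls (~12 min)
            let i+=1
        done
        # once ready, the trace also verifies the replica counts; the proxy check
        # uses {.status.proxysql.ready} or {.status.haproxy.ready} against proxy_size
        [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.pxc.ready}') == "$cluster_size" ]]
    }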
+ return + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6qoBNo2iHF +++ mktemp ++ local LAST_ERR=/tmp/tmp.OVni1smt79 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6qoBNo2iHF ++ cat /tmp/tmp.OVni1smt79 ++ rm /tmp/tmp.6qoBNo2iHF /tmp/tmp.OVni1smt79 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HH5UuNP5ZA +++ mktemp ++ local LAST_ERR=/tmp/tmp.uosGyKd0Gc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HH5UuNP5ZA ++ cat /tmp/tmp.uosGyKd0Gc ++ rm /tmp/tmp.HH5UuNP5ZA /tmp/tmp.uosGyKd0Gc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6waUwjMp8d +++ mktemp ++ local LAST_ERR=/tmp/tmp.pwGSlteuBD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6waUwjMp8d ++ cat /tmp/tmp.pwGSlteuBD ++ rm /tmp/tmp.6waUwjMp8d /tmp/tmp.pwGSlteuBD ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vfWN3Pw85U +++ mktemp ++ local LAST_ERR=/tmp/tmp.mgO8eQr65s ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vfWN3Pw85U ++ cat /tmp/tmp.mgO8eQr65s ++ rm /tmp/tmp.vfWN3Pw85U /tmp/tmp.mgO8eQr65s ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.qi4shxfa6H ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.TqNMLpfx14 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.qi4shxfa6H +++++ cat /tmp/tmp.TqNMLpfx14 +++++ rm /tmp/tmp.qi4shxfa6H /tmp/tmp.TqNMLpfx14 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ApjTNqGeLz ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.jcKsNqkEmc +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get 
pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ApjTNqGeLz +++++ cat /tmp/tmp.jcKsNqkEmc +++++ rm /tmp/tmp.ApjTNqGeLz /tmp/tmp.jcKsNqkEmc +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.d3UECBOA2e +++ mktemp ++ local LAST_ERR=/tmp/tmp.XotLCOhTRS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.d3UECBOA2e ++ cat /tmp/tmp.XotLCOhTRS ++ rm /tmp/tmp.d3UECBOA2e /tmp/tmp.XotLCOhTRS ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lhDsnHfnWT +++ mktemp ++ local LAST_ERR=/tmp/tmp.Jl1zKsbVkI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lhDsnHfnWT ++ cat /tmp/tmp.Jl1zKsbVkI ++ rm /tmp/tmp.lhDsnHfnWT /tmp/tmp.Jl1zKsbVkI ++ return 0 + client_pod=pxc-client-64b479df95-8brh6 + wait_pod pxc-client-64b479df95-8brh6 + local pod=pxc-client-64b479df95-8brh6 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-8brh6 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-8brh6 condition met pxc-client-64b479df95-8brh6.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.oaVtWV5LhP/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-4.sql /tmp/tmp.oaVtWV5LhP/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.kLSVF8svOJ ++ mktemp + local LAST_ERR=/tmp/tmp.54R9oASakK + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.kLSVF8svOJ secret/my-cluster-secrets patched + cat /tmp/tmp.54R9oASakK + rm /tmp/tmp.kLSVF8svOJ /tmp/tmp.54R9oASakK + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HQFrKs23oD +++ mktemp ++ local LAST_ERR=/tmp/tmp.Zg714feejg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HQFrKs23oD ++ cat /tmp/tmp.Zg714feejg ++ rm /tmp/tmp.HQFrKs23oD /tmp/tmp.Zg714feejg ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tXAYFzQIcY +++ mktemp ++ local LAST_ERR=/tmp/tmp.QB1vLQL1Kv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tXAYFzQIcY ++ cat /tmp/tmp.QB1vLQL1Kv ++ rm /tmp/tmp.tXAYFzQIcY /tmp/tmp.QB1vLQL1Kv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TqcXcJrLIE +++ mktemp ++ local LAST_ERR=/tmp/tmp.7Xj2uT9oFp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TqcXcJrLIE ++ cat /tmp/tmp.7Xj2uT9oFp ++ rm /tmp/tmp.TqcXcJrLIE /tmp/tmp.7Xj2uT9oFp ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Pf1z7dUpKC +++ mktemp ++ local LAST_ERR=/tmp/tmp.7jYQazTmot ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break 
++ cat /tmp/tmp.Pf1z7dUpKC ++ cat /tmp/tmp.7jYQazTmot ++ rm /tmp/tmp.Pf1z7dUpKC /tmp/tmp.7jYQazTmot ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.tqJsdcwKQb ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.xiVNgYPFtW +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.tqJsdcwKQb +++++ cat /tmp/tmp.xiVNgYPFtW +++++ rm /tmp/tmp.tqJsdcwKQb /tmp/tmp.xiVNgYPFtW +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.SBniAYuW3I ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.AHDi8NhGFF +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.SBniAYuW3I +++++ cat /tmp/tmp.AHDi8NhGFF +++++ rm /tmp/tmp.SBniAYuW3I /tmp/tmp.AHDi8NhGFF +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZgYsCMGIHY +++ mktemp ++ local LAST_ERR=/tmp/tmp.gS5pLGqozV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZgYsCMGIHY ++ cat /tmp/tmp.gS5pLGqozV ++ rm /tmp/tmp.ZgYsCMGIHY /tmp/tmp.gS5pLGqozV ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3t0m9EiMeG +++ mktemp ++ local LAST_ERR=/tmp/tmp.6wsXACw3fY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3t0m9EiMeG ++ cat /tmp/tmp.6wsXACw3fY ++ rm /tmp/tmp.3t0m9EiMeG /tmp/tmp.6wsXACw3fY ++ return 0 + client_pod=pxc-client-64b479df95-8brh6 + wait_pod pxc-client-64b479df95-8brh6 + local pod=pxc-client-64b479df95-8brh6 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-8brh6 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-8brh6 condition met pxc-client-64b479df95-8brh6.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.oaVtWV5LhP/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-4.sql /tmp/tmp.oaVtWV5LhP/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.S2rT3wUIDS ++ mktemp + local LAST_ERR=/tmp/tmp.ckqw4TzCUE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.S2rT3wUIDS perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.ckqw4TzCUE + rm /tmp/tmp.S2rT3wUIDS /tmp/tmp.ckqw4TzCUE + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QykdMw8TFW +++ mktemp ++ local LAST_ERR=/tmp/tmp.OBh3PJUezp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QykdMw8TFW ++ cat /tmp/tmp.OBh3PJUezp ++ rm /tmp/tmp.QykdMw8TFW /tmp/tmp.OBh3PJUezp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Iy7S7IBsSz +++ mktemp ++ local LAST_ERR=/tmp/tmp.SUKouKXimp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Iy7S7IBsSz ++ cat /tmp/tmp.SUKouKXimp ++ rm /tmp/tmp.Iy7S7IBsSz /tmp/tmp.SUKouKXimp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fhZn19YKK1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.OyPAQihZki ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fhZn19YKK1 ++ cat /tmp/tmp.OyPAQihZki ++ rm /tmp/tmp.fhZn19YKK1 /tmp/tmp.OyPAQihZki ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mHnZBPP46C +++ mktemp ++ local LAST_ERR=/tmp/tmp.4aAiKCwHA3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break 
++ cat /tmp/tmp.mHnZBPP46C ++ cat /tmp/tmp.4aAiKCwHA3 ++ rm /tmp/tmp.mHnZBPP46C /tmp/tmp.4aAiKCwHA3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iZ0qB4f50w +++ mktemp ++ local LAST_ERR=/tmp/tmp.EZs20iRufT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iZ0qB4f50w ++ cat /tmp/tmp.EZs20iRufT ++ rm /tmp/tmp.iZ0qB4f50w /tmp/tmp.EZs20iRufT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OagJwUZ7Af +++ mktemp ++ local LAST_ERR=/tmp/tmp.Mo4qXeib8w ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OagJwUZ7Af ++ cat /tmp/tmp.Mo4qXeib8w ++ rm /tmp/tmp.OagJwUZ7Af /tmp/tmp.Mo4qXeib8w ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qZjEraMmet +++ mktemp ++ local LAST_ERR=/tmp/tmp.YK9ZcOymnx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qZjEraMmet ++ cat /tmp/tmp.YK9ZcOymnx ++ rm /tmp/tmp.qZjEraMmet /tmp/tmp.YK9ZcOymnx ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YnoHNquuL3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Pwf5ESWibE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YnoHNquuL3 ++ cat /tmp/tmp.Pwf5ESWibE ++ rm /tmp/tmp.YnoHNquuL3 /tmp/tmp.Pwf5ESWibE ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6qBUR1ewHf +++ mktemp ++ local LAST_ERR=/tmp/tmp.5DHTlSP45M ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6qBUR1ewHf ++ cat /tmp/tmp.5DHTlSP45M ++ rm /tmp/tmp.6qBUR1ewHf /tmp/tmp.5DHTlSP45M ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.mIwIZ8QPNY ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.99mdRDstZR +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break 
+++++ cat /tmp/tmp.mIwIZ8QPNY +++++ cat /tmp/tmp.99mdRDstZR +++++ rm /tmp/tmp.mIwIZ8QPNY /tmp/tmp.99mdRDstZR +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.XbbMXhRdRr ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.5QOfweujGH +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.XbbMXhRdRr +++++ cat /tmp/tmp.5QOfweujGH +++++ rm /tmp/tmp.XbbMXhRdRr /tmp/tmp.5QOfweujGH +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HwmIuaZeJD +++ mktemp ++ local LAST_ERR=/tmp/tmp.WzcMkApLNQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HwmIuaZeJD ++ cat /tmp/tmp.WzcMkApLNQ ++ rm /tmp/tmp.HwmIuaZeJD /tmp/tmp.WzcMkApLNQ ++ return 0 + [[ 2 == \2 ]] + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.HbE5kWaDha ++ mktemp + local LAST_ERR=/tmp/tmp.Q6M8RTIAKh + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HbE5kWaDha secret/my-cluster-secrets-2 patched + cat /tmp/tmp.Q6M8RTIAKh + rm /tmp/tmp.HbE5kWaDha /tmp/tmp.Q6M8RTIAKh + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e8XlYjQBdb +++ mktemp ++ local LAST_ERR=/tmp/tmp.YjXqyu0X6x ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.e8XlYjQBdb ++ cat /tmp/tmp.YjXqyu0X6x ++ rm /tmp/tmp.e8XlYjQBdb /tmp/tmp.YjXqyu0X6x ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NbZQp1tjrd +++ mktemp ++ local LAST_ERR=/tmp/tmp.22ymBP1RlR ++ local exit_status=0 +++ seq 
0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NbZQp1tjrd ++ cat /tmp/tmp.22ymBP1RlR ++ rm /tmp/tmp.NbZQp1tjrd /tmp/tmp.22ymBP1RlR ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gGRrNMPkde +++ mktemp ++ local LAST_ERR=/tmp/tmp.Neq59x0J8s ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gGRrNMPkde ++ cat /tmp/tmp.Neq59x0J8s ++ rm /tmp/tmp.gGRrNMPkde /tmp/tmp.Neq59x0J8s ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.j8jg7V4fn3 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Q6Zd5YD8VP +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.j8jg7V4fn3 +++++ cat /tmp/tmp.Q6Zd5YD8VP +++++ rm /tmp/tmp.j8jg7V4fn3 /tmp/tmp.Q6Zd5YD8VP +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.RIkEdLXxjz ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Pt0yKaZQqY +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.RIkEdLXxjz +++++ cat /tmp/tmp.Pt0yKaZQqY +++++ rm /tmp/tmp.RIkEdLXxjz /tmp/tmp.Pt0yKaZQqY +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WtpCRFNZmR +++ mktemp ++ local LAST_ERR=/tmp/tmp.IgGKPs7UfM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WtpCRFNZmR ++ cat /tmp/tmp.IgGKPs7UfM ++ rm /tmp/tmp.WtpCRFNZmR /tmp/tmp.IgGKPs7UfM ++ return 0 + [[ 2 == \2 ]] + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J9WLVm0eTO +++ mktemp ++ local LAST_ERR=/tmp/tmp.tQcmmXkuCg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods 
--selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.J9WLVm0eTO ++ cat /tmp/tmp.tQcmmXkuCg ++ rm /tmp/tmp.J9WLVm0eTO /tmp/tmp.tQcmmXkuCg ++ return 0 + client_pod=pxc-client-64b479df95-8brh6 + wait_pod pxc-client-64b479df95-8brh6 + local pod=pxc-client-64b479df95-8brh6 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-8brh6 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-8brh6 condition met pxc-client-64b479df95-8brh6.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.oaVtWV5LhP/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-4.sql /tmp/tmp.oaVtWV5LhP/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.1NWdS7q8dP +++ mktemp ++ local LAST_ERR=/tmp/tmp.OqU2h9Lv0f ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1NWdS7q8dP ++ cat /tmp/tmp.OqU2h9Lv0f ++ rm /tmp/tmp.1NWdS7q8dP /tmp/tmp.OqU2h9Lv0f ++ return 0 + newpass='@1,*ZUm%[l5xbCF85)9' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''@1,*ZUm%[l5xbCF85)9'\'';' '-h some-name-pxc -uroot -p'\''@1,*ZUm%[l5xbCF85)9'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''@1,*ZUm%[l5xbCF85)9'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''@1,*ZUm%[l5xbCF85)9'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VbJswvuyqN +++ mktemp ++ local LAST_ERR=/tmp/tmp.7zQUQMmrX5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VbJswvuyqN ++ cat /tmp/tmp.7zQUQMmrX5 ++ rm /tmp/tmp.VbJswvuyqN /tmp/tmp.7zQUQMmrX5 ++ return 0 + client_pod=pxc-client-64b479df95-8brh6 + wait_pod pxc-client-64b479df95-8brh6 + local pod=pxc-client-64b479df95-8brh6 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-8brh6 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-8brh6 condition met pxc-client-64b479df95-8brh6.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''@1,*ZUm%[l5xbCF85)9'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''@1,*ZUm%[l5xbCF85)9'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''@1,*ZUm%[l5xbCF85)9'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql 
-utestsync -p'\''@1,*ZUm%[l5xbCF85)9'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IVKlE1uJz6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.88huzNZsjq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IVKlE1uJz6 ++ cat /tmp/tmp.88huzNZsjq ++ rm /tmp/tmp.IVKlE1uJz6 /tmp/tmp.88huzNZsjq ++ return 0 + client_pod=pxc-client-64b479df95-8brh6 + wait_pod pxc-client-64b479df95-8brh6 + local pod=pxc-client-64b479df95-8brh6 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-8brh6 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-8brh6 condition met pxc-client-64b479df95-8brh6.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.oaVtWV5LhP/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-4.sql /tmp/tmp.oaVtWV5LhP/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.rhdoQ9bStO +++ mktemp ++ local LAST_ERR=/tmp/tmp.Gx99pgICwQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rhdoQ9bStO ++ cat /tmp/tmp.Gx99pgICwQ ++ rm /tmp/tmp.rhdoQ9bStO /tmp/tmp.Gx99pgICwQ ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.cJdq1wu8yL ++ mktemp + local LAST_ERR=/tmp/tmp.tpdJHnzcGa + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.cJdq1wu8yL secret/my-cluster-secrets-2 configured + cat /tmp/tmp.tpdJHnzcGa Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
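The two secret helpers exercised above are symmetric: patch_secret writes one base64-encoded key into a Kubernetes secret, and getSecretData reads a key back out and decodes it (that is how the operator password test-password2 was just recovered from internal-some-name). A sketch of both, with the function wrappers assumed and only the kubectl invocations taken verbatim from the trace:

    # write: replace a single data key; the value must already be base64-encoded
    # (dGVzdC1wYXNzd29yZA== is simply 'test-password' encoded)
    patch_secret() {
        local secret="$1" key="$2" value="$3"
        kubectl patch secret "$secret" -p="{\"data\":{\"$key\": \"$value\"}}"
    }

    # read: print one decoded data key from a secret
    getSecretData() {
        local secretName="$1" dataKey="$2"
        kubectl get "secrets/$secretName" "--template={{.data.$dataKey}}" | base64 --decode
    }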
+ rm /tmp/tmp.cJdq1wu8yL /tmp/tmp.tpdJHnzcGa + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.13twgpiEtP +++ mktemp ++ local LAST_ERR=/tmp/tmp.nJhSwgc4fM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.13twgpiEtP ++ cat /tmp/tmp.nJhSwgc4fM ++ rm /tmp/tmp.13twgpiEtP /tmp/tmp.nJhSwgc4fM ++ return 0 + client_pod=pxc-client-64b479df95-8brh6 + wait_pod pxc-client-64b479df95-8brh6 + local pod=pxc-client-64b479df95-8brh6 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-8brh6 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-8brh6 condition met pxc-client-64b479df95-8brh6.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.oaVtWV5LhP/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-4.sql /tmp/tmp.oaVtWV5LhP/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_OUT=/tmp/tmp.diQ181iWMF + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1718-4c5c3df7#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + local LAST_ERR=/tmp/tmp.Ok0VMMIE1s + local exit_status=0 + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-28617~ + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.diQ181iWMF 
perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.Ok0VMMIE1s + rm /tmp/tmp.diQ181iWMF /tmp/tmp.Ok0VMMIE1s + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e7ZCCkGCbf +++ mktemp ++ local LAST_ERR=/tmp/tmp.MBtp87S3Qb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.e7ZCCkGCbf ++ cat /tmp/tmp.MBtp87S3Qb ++ rm /tmp/tmp.e7ZCCkGCbf /tmp/tmp.MBtp87S3Qb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NXd8fkF0Hp +++ mktemp ++ local LAST_ERR=/tmp/tmp.eByredDXZn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NXd8fkF0Hp ++ cat /tmp/tmp.eByredDXZn ++ rm /tmp/tmp.NXd8fkF0Hp /tmp/tmp.eByredDXZn ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.80JN58W2dz +++ mktemp ++ local LAST_ERR=/tmp/tmp.Uu1JczxMUZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.80JN58W2dz ++ cat /tmp/tmp.Uu1JczxMUZ ++ rm /tmp/tmp.80JN58W2dz /tmp/tmp.Uu1JczxMUZ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FCWNFh8WlG +++ mktemp ++ local LAST_ERR=/tmp/tmp.9sAbExqsKn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FCWNFh8WlG ++ cat /tmp/tmp.9sAbExqsKn ++ rm /tmp/tmp.FCWNFh8WlG /tmp/tmp.9sAbExqsKn ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.N2Zm3BbWl0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.JoJkDQDAWP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.N2Zm3BbWl0 ++ cat /tmp/tmp.JoJkDQDAWP ++ rm /tmp/tmp.N2Zm3BbWl0 /tmp/tmp.JoJkDQDAWP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 
]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3BCp8PD3tS +++ mktemp ++ local LAST_ERR=/tmp/tmp.WOoCtwCeDD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3BCp8PD3tS ++ cat /tmp/tmp.WOoCtwCeDD ++ rm /tmp/tmp.3BCp8PD3tS /tmp/tmp.WOoCtwCeDD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.id4m69gCgS +++ mktemp ++ local LAST_ERR=/tmp/tmp.Jv1AKrW41J ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.id4m69gCgS ++ cat /tmp/tmp.Jv1AKrW41J ++ rm /tmp/tmp.id4m69gCgS /tmp/tmp.Jv1AKrW41J ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mV94kRWu0K +++ mktemp ++ local LAST_ERR=/tmp/tmp.mdrQW8ofwE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mV94kRWu0K ++ cat /tmp/tmp.mdrQW8ofwE ++ rm /tmp/tmp.mV94kRWu0K /tmp/tmp.mdrQW8ofwE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZP4XifhzJY +++ mktemp ++ local LAST_ERR=/tmp/tmp.hgiQEtYMGp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZP4XifhzJY ++ cat /tmp/tmp.hgiQEtYMGp ++ rm /tmp/tmp.ZP4XifhzJY /tmp/tmp.hgiQEtYMGp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 8 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fr7Fw1Zam1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.082ZKKqQHg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fr7Fw1Zam1 ++ cat /tmp/tmp.082ZKKqQHg ++ rm /tmp/tmp.fr7Fw1Zam1 /tmp/tmp.082ZKKqQHg ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 9 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HnsMqj7JeC +++ mktemp ++ local LAST_ERR=/tmp/tmp.pChApOoege ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HnsMqj7JeC ++ cat /tmp/tmp.pChApOoege ++ rm /tmp/tmp.HnsMqj7JeC /tmp/tmp.pChApOoege ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' 
waiting for cluster readyness + sleep 20 + [[ 10 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CjOBD7HCBr +++ mktemp ++ local LAST_ERR=/tmp/tmp.d2Ik2kHnna ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CjOBD7HCBr ++ cat /tmp/tmp.d2Ik2kHnna ++ rm /tmp/tmp.CjOBD7HCBr /tmp/tmp.d2Ik2kHnna ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 11 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gQuM5bb5iA +++ mktemp ++ local LAST_ERR=/tmp/tmp.48ennOlxLB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gQuM5bb5iA ++ cat /tmp/tmp.48ennOlxLB ++ rm /tmp/tmp.gQuM5bb5iA /tmp/tmp.48ennOlxLB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 12 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.POHb9Zw2dW +++ mktemp ++ local LAST_ERR=/tmp/tmp.vW3NWYnuwK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.POHb9Zw2dW ++ cat /tmp/tmp.vW3NWYnuwK ++ rm /tmp/tmp.POHb9Zw2dW /tmp/tmp.vW3NWYnuwK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 13 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.P8MSr4KBGK +++ mktemp ++ local LAST_ERR=/tmp/tmp.R103gAtGsd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.P8MSr4KBGK ++ cat /tmp/tmp.R103gAtGsd ++ rm /tmp/tmp.P8MSr4KBGK /tmp/tmp.R103gAtGsd ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 14 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EcMMZQIH8N +++ mktemp ++ local LAST_ERR=/tmp/tmp.62x9gx1yWW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EcMMZQIH8N ++ cat /tmp/tmp.62x9gx1yWW ++ rm /tmp/tmp.EcMMZQIH8N /tmp/tmp.62x9gx1yWW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 15 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5XulgImxRT +++ mktemp ++ local LAST_ERR=/tmp/tmp.mQDaIASiGB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5XulgImxRT ++ cat /tmp/tmp.mQDaIASiGB ++ rm /tmp/tmp.5XulgImxRT /tmp/tmp.mQDaIASiGB ++ return 0 + [[ 
initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 16 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jIQoVC0IFE +++ mktemp ++ local LAST_ERR=/tmp/tmp.ypYVK4rdtw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jIQoVC0IFE ++ cat /tmp/tmp.ypYVK4rdtw ++ rm /tmp/tmp.jIQoVC0IFE /tmp/tmp.ypYVK4rdtw ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KCO6kDhYB0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.lSD7X8FFd6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KCO6kDhYB0 ++ cat /tmp/tmp.lSD7X8FFd6 ++ rm /tmp/tmp.KCO6kDhYB0 /tmp/tmp.lSD7X8FFd6 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ryVGd1SwTt ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.zIaqS5EFrh +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ryVGd1SwTt +++++ cat /tmp/tmp.zIaqS5EFrh +++++ rm /tmp/tmp.ryVGd1SwTt /tmp/tmp.zIaqS5EFrh +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.I5WikjwrnP +++ mktemp ++ local LAST_ERR=/tmp/tmp.H3XDNIp3sH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.I5WikjwrnP ++ cat /tmp/tmp.H3XDNIp3sH ++ rm /tmp/tmp.I5WikjwrnP /tmp/tmp.H3XDNIp3sH ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.adGT7dH9H0 ++ mktemp + local LAST_ERR=/tmp/tmp.lcNqIM3bz5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.adGT7dH9H0 secret/my-cluster-secrets patched + cat /tmp/tmp.lcNqIM3bz5 + rm /tmp/tmp.adGT7dH9H0 /tmp/tmp.lcNqIM3bz5 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 
++ kubectl get pxc some-name -o 'jsonpath={.status.state}'
+ [[ initializing == \r\e\a\d\y ]]
+ echo 'waiting for cluster readiness'
waiting for cluster readiness
+ sleep 20
+ [[ 0 -ge 36 ]]
+ let i+=1
[attempts 1 and 2 likewise return "initializing"]
++ kubectl get pxc some-name -o 'jsonpath={.status.state}'
+ [[ ready == \r\e\a\d\y ]]
++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}'
+ [[ 3 == \3 ]]
+++ get_proxy_engine some-name
++++ get_proxy some-name
+++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}'
++++ [[ true == \t\r\u\e ]]
++++ echo some-name-haproxy
+++ local cluster_proxy=some-name-haproxy
+++ echo haproxy
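Every kubectl call in this transcript runs through the suite's kubectl_bin wrapper, whose bookkeeping is condensed above. From the visible trace (seq 0 2, set +e around the call, exit_status checks, cat of two mktemp files, rm), it captures stdout/stderr and retries up to three times on failure. A reconstruction under those assumptions; any backoff between attempts is a guess, since the trace shows none:

# Reconstructed from the trace: capture output to temp files, retry up to
# 3 times on non-zero exit, then replay captured stdout/stderr.
kubectl_bin() {
  local out err rc=0 i
  out=$(mktemp) err=$(mktemp)
  for i in $(seq 0 2); do
    set +e
    kubectl "$@" >"$out" 2>"$err"
    rc=$?
    set -e
    [ "$rc" -eq 0 ] && break
  done
  cat "$out"; cat "$err" >&2
  rm "$out" "$err"
  return "$rc"
}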
++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}'
+ [[ 3 == \3 ]]
+ compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\'''
+ local command_id=select-3
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-3.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]]
+ run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\'''
++ get_client_pod
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+ client_pod=pxc-client-64b479df95-8brh6
+ wait_pod pxc-client-64b479df95-8brh6
+ local max_retry=480
+ set +o xtrace
pod/pxc-client-64b479df95-8brh6 condition met
pxc-client-64b479df95-8brh6.Ok
+ set +o xtrace
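compare_mysql_cmd runs the statement through the pxc-client pod and diffs the output against a canned expected file; the regex check above picks a version-specific file only for 8.0 images, so this 5.7 run uses the default select-3.sql. A simplified sketch of the run-and-diff step, with paths and names taken from the trace; the exact mysql flags inside the client pod are an assumption, since the trace only shows the wrapper call:

# Run a statement via the client pod and compare against the expected output.
client_pod=$(kubectl get pods --selector=name=pxc-client \
  -o 'jsonpath={.items[].metadata.name}')
kubectl exec "$client_pod" -- bash -c \
  "mysql -sN -h some-name-haproxy -umonitor -p'test-password2' -e 'SHOW DATABASES;'" \
  > /tmp/select-3.sql
diff -u e2e-tests/users/compare/select-3.sql /tmp/select-3.sql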
-s /tmp/tmp.oaVtWV5LhP/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1718/e2e-tests/users/compare/select-3.sql /tmp/tmp.oaVtWV5LhP/select-3.sql + destroy users-28617 + local namespace=users-28617 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + sort -u + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator + tee /tmp/tmp.oaVtWV5LhP/operator.log +++ grep -c percona-xtradb-cluster-operator + grep -v 'get backup status: Job.batch' ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.nWweBLZOWp +++ mktemp ++ local LAST_ERR=/tmp/tmp.HYAnAKPNHr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nWweBLZOWp ++ cat /tmp/tmp.HYAnAKPNHr ++ rm /tmp/tmp.nWweBLZOWp /tmp/tmp.HYAnAKPNHr ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-68fd4c6d88-rtnq9 ++ mktemp + local LAST_OUT=/tmp/tmp.CHG9wqVgu0 ++ mktemp + local LAST_ERR=/tmp/tmp.0DufeUhUPA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-68fd4c6d88-rtnq9 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.CHG9wqVgu0 + cat /tmp/tmp.0DufeUhUPA + rm /tmp/tmp.CHG9wqVgu0 /tmp/tmp.0DufeUhUPA + return 0 2024-06-24T09:51:27.135Z INFO setup Manager starting up {"gitCommit": "4c5c3df763551644a6ecc997d33180e3c7ed387e", "gitBranch": "PR-1718-4c5c3df7", "buildTime": "2024-06-24T09:24:09Z", "goVersion": "go1.22.4", "os": "linux", "arch": "amd64"} 2024-06-24T09:51:27.135Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1436000"} 2024-06-24T09:51:27.136Z INFO setup Registering Components. 2024-06-24T09:51:30.739Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2024-06-24T09:51:30.744Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2024-06-24T09:51:30.744Z INFO controller-runtime.metrics Starting metrics server 2024-06-24T09:51:30.744Z INFO controller-runtime.webhook Starting webhook server 2024-06-24T09:51:30.744Z INFO setup Starting the Cmd. 2024-06-24T09:51:30.744Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2024-06-24T09:51:30.745Z INFO controller-runtime.certwatcher Updated current TLS certificate 2024-06-24T09:51:30.746Z INFO controller-runtime.certwatcher Starting certificate watcher 2024-06-24T09:51:30.746Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2024-06-24T09:51:30.846Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 
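Because set -x traces each pipeline stage as it spawns, the stages above appeared interleaved in the raw log; they are shown in one plausible execution order, and every stage is verbatim from the trace. One side effect worth knowing: sort -u keeps the timestamped entries chronological (ISO-8601 timestamps sort lexicographically) but shreds multi-line Go stack traces into alphabetized single-line fragments, which is what the residue near the end of this log is. The saved copy can still be sliced after the fact, for example:

# Pull only the ERROR entries out of the sanitized, deduplicated copy.
grep -E '^2024-[0-9T:.Z-]+ ERROR' /tmp/tmp.oaVtWV5LhP/operator.log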
2024-06-24T09:51:30.876Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2024-06-24T09:51:30.877Z DEBUG events percona-xtradb-cluster-operator-68fd4c6d88-rtnq9_c13492d6-14df-4da2-8529-0fdf3cbec7b3 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"dac6fba4-4068-4694-877f-5deef0720e2f","apiVersion":"coordination.k8s.io/v1","resourceVersion":"12032"}, "reason": "LeaderElection"} 2024-06-24T09:51:30.877Z INFO Starting Controller {"controller": "pxc-controller"} 2024-06-24T09:51:30.877Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2024-06-24T09:51:30.924Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2024-06-24T09:51:30.924Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2024-06-24T09:51:30.925Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2024-06-24T09:51:30.925Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2024-06-24T09:51:31.026Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2024-06-24T09:51:31.026Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2024-06-24T09:51:31.026Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2024-06-24T09:51:54.161Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "d2051c31-8350-40a9-8293-659ce7a1b24b", "version": "1.15.0"} 2024-06-24T09:53:14.812Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "817ed7a1-43c3-4a6a-96a0-df9bba27cbcd", "user": "operator"} 2024-06-24T09:53:14.844Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "817ed7a1-43c3-4a6a-96a0-df9bba27cbcd", "user": "monitor"} 2024-06-24T09:53:14.900Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "817ed7a1-43c3-4a6a-96a0-df9bba27cbcd"} 2024-06-24T09:53:14.951Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "817ed7a1-43c3-4a6a-96a0-df9bba27cbcd", "user": "xtrabackup"} 2024-06-24T09:53:14.995Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "817ed7a1-43c3-4a6a-96a0-df9bba27cbcd"} 2024-06-24T09:53:15.126Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "817ed7a1-43c3-4a6a-96a0-df9bba27cbcd", "err": "get primary pxc pod: not found"} 2024-06-24T09:53:19.840Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "9702a810-3c9f-42ac-9a5f-0baa78341c4d", "err": "get primary pxc pod: not found"} 2024-06-24T09:53:25.152Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "9beb62d5-dc94-4b1a-b70b-34d9fbd73de5", "err": "get primary pxc pod: not found"} 2024-06-24T09:53:30.486Z INFO reconcile replication error {"controller": "pxc-controller", 
"namespace": "users-28617", "name": "some-name", "reconcileID": "bca3380c-93e2-462e-80b8-c53228170216", "err": "get primary pxc pod: not found"} 2024-06-24T09:55:45.562Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "f8f4db8d-1f9f-4d1e-b4b5-c8f5cc5cfaba", "user": "root"} 2024-06-24T09:55:45.610Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "f8f4db8d-1f9f-4d1e-b4b5-c8f5cc5cfaba", "user": "replication"} 2024-06-24T09:55:46.083Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "f8f4db8d-1f9f-4d1e-b4b5-c8f5cc5cfaba", "new version": "5.7.44-48-57"} 2024-06-24T09:55:49.466Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "f8f4db8d-1f9f-4d1e-b4b5-c8f5cc5cfaba"} 2024-06-24T09:55:53.866Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "0703c056-487c-47c2-bae1-e9a5e0eb5267"} 2024-06-24T09:55:59.389Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "3bb7c1b4-1e37-4d11-9536-c13105e250b8"} 2024-06-24T09:56:04.764Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "c842e0d3-c7f1-4aa0-a3cb-18d33be4de5a"} 2024-06-24T09:56:09.947Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "7a901885-6cdd-40e6-8233-b29d443c6007"} 2024-06-24T09:56:15.966Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "aff720f7-1caa-449b-95cf-ec03aa2ce306"} 2024-06-24T09:56:20.589Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "d63181d3-e274-4e66-8363-67e9c92fbe71"} 2024-06-24T09:56:26.069Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "ddd78734-c805-4c21-b2c7-0c26f0ef28b2"} 2024-06-24T09:56:31.289Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "2584b8ff-cf2e-4262-aeae-25d91d19f354"} 2024-06-24T09:56:36.649Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "abf9da77-b623-4bdb-84cf-e17edb0abe0f"} 2024-06-24T09:56:41.960Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "6e70410c-f20e-4c8d-af13-5bd7665caeeb"} 2024-06-24T09:56:48.240Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "92346e11-1dd7-4fdd-9001-58dd92da14f1"} 2024-06-24T09:56:49.903Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "eab4dc19-06bf-47b9-af27-b3efc72fd3d9", "user": "root"} 2024-06-24T09:56:49.957Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28617", "name": 
"some-name", "reconcileID": "eab4dc19-06bf-47b9-af27-b3efc72fd3d9", "user": "root"} 2024-06-24T09:56:49.966Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "eab4dc19-06bf-47b9-af27-b3efc72fd3d9", "secret": "some-name-mysql-init", "user": "root"} 2024-06-24T09:56:55.776Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "eab4dc19-06bf-47b9-af27-b3efc72fd3d9"} 2024-06-24T09:56:55.788Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "eab4dc19-06bf-47b9-af27-b3efc72fd3d9", "user": "root"} 2024-06-24T09:56:59.839Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "eab4dc19-06bf-47b9-af27-b3efc72fd3d9"} 2024-06-24T09:57:04.730Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "ee0a909c-0d84-430b-a617-784cbb43e7eb"} 2024-06-24T09:57:10.066Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "dc586310-81de-45c1-b1e6-b89fc2b267ff"} 2024-06-24T09:57:32.203Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "2a2ee4db-7b50-441a-aecd-100c5f202c7b", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-24T09:57:33.038Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "16d9a236-5c19-48f0-940e-668829345aa9", "user": "proxyadmin"} 2024-06-24T09:57:33.038Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "16d9a236-5c19-48f0-940e-668829345aa9", "user": "proxyadmin"} 2024-06-24T09:57:33.114Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "16d9a236-5c19-48f0-940e-668829345aa9", "user": "proxyadmin"} 2024-06-24T09:57:33.127Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "16d9a236-5c19-48f0-940e-668829345aa9", "user": "proxyadmin"} 2024-06-24T09:57:33.127Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "16d9a236-5c19-48f0-940e-668829345aa9", "last-applied-secret": "1a55867d0f4fa0d0643653b2eb57b2f690315d8d843a10599c9839c2a779a95f"} 2024-06-24T09:57:33.362Z ERROR 
sync users {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "16d9a236-5c19-48f0-940e-668829345aa9", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-24T09:58:18.386Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "5b82f706-aef3-4a4c-a66e-9442891fef2b"} 2024-06-24T09:58:29.249Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "4873e8be-eeb5-4c63-bd48-5f9119f3c99c", "user": "xtrabackup"} 2024-06-24T09:58:29.282Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "4873e8be-eeb5-4c63-bd48-5f9119f3c99c", "user": "xtrabackup"} 2024-06-24T09:58:29.306Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "4873e8be-eeb5-4c63-bd48-5f9119f3c99c", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-24T09:58:29.322Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "4873e8be-eeb5-4c63-bd48-5f9119f3c99c", "user": "xtrabackup"} 2024-06-24T09:58:29.322Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "4873e8be-eeb5-4c63-bd48-5f9119f3c99c", "last-applied-secret": "d11bcc70ca53ea410cd455e13d82e7225917b0861d9b677ed34c02c5f6fbaf1b"} 2024-06-24T09:58:36.188Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "4873e8be-eeb5-4c63-bd48-5f9119f3c99c"} 2024-06-24T10:01:09.141Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "304e47d0-a865-4760-96cc-413280e07175"} 2024-06-24T10:01:13.765Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "ea18ceb3-6bfe-4754-9c5a-85554f962afb"} 2024-06-24T10:01:14.913Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "d63c1199-e3e5-44a9-9d58-e25061c995c8", 
"user": "monitor"} 2024-06-24T10:01:14.956Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "d63c1199-e3e5-44a9-9d58-e25061c995c8", "user": "monitor"} 2024-06-24T10:01:14.967Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "d63c1199-e3e5-44a9-9d58-e25061c995c8", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-24T10:01:15.041Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "d63c1199-e3e5-44a9-9d58-e25061c995c8", "user": "monitor"} 2024-06-24T10:01:15.053Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "d63c1199-e3e5-44a9-9d58-e25061c995c8", "user": "monitor"} 2024-06-24T10:01:15.053Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "d63c1199-e3e5-44a9-9d58-e25061c995c8", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-06-24T10:01:18.496Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "d63c1199-e3e5-44a9-9d58-e25061c995c8", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-24T10:02:01.807Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "6820338f-c6f7-437d-b036-f894be7b7441"} 2024-06-24T10:02:06.820Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "a777f7c6-3b0f-483d-8fe9-dc66c34e293c"} 2024-06-24T10:02:12.217Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "8d8a3794-cf22-4873-b406-6a0e1eaae316"} 2024-06-24T10:02:19.234Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "a7a5f2bf-6414-42d0-9a54-e90e743071d0"} 2024-06-24T10:02:19.954Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "903fe679-b688-4157-8638-e33b1e0bfeb4", "user": "operator"} 2024-06-24T10:02:19.981Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "903fe679-b688-4157-8638-e33b1e0bfeb4", "user": "operator"} 2024-06-24T10:02:19.992Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "903fe679-b688-4157-8638-e33b1e0bfeb4", "secret": "some-name-mysql-init", "user": "operator"} 
2024-06-24T10:02:20.005Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "903fe679-b688-4157-8638-e33b1e0bfeb4", "user": "operator"} 2024-06-24T10:02:20.005Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "903fe679-b688-4157-8638-e33b1e0bfeb4", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-06-24T10:02:21.615Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "903fe679-b688-4157-8638-e33b1e0bfeb4", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28617.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28617.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28617.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28617.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28617.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28617.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-24T10:03:06.836Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "eb4659fd-56a3-4719-a99b-468a82833ac7"} 2024-06-24T10:03:14.845Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "70f91ef5-1d60-4263-9f89-b3f5b9fee345"} 2024-06-24T10:03:20.450Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "70fbe0b8-7095-4f79-8d92-b26325829893"} 2024-06-24T10:03:25.437Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "b8254e78-e65c-4237-ae4e-3c7fe32efeb9"} 2024-06-24T10:03:30.744Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "7d680ae5-4f0d-42d0-ac49-f775818b6e39"} 2024-06-24T10:03:35.881Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "9263602e-b0da-4964-9b4e-20a18607aeff"} 2024-06-24T10:03:36.736Z INFO 
Created user secrets {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "secrets": "my-cluster-secrets-2"} 2024-06-24T10:03:36.736Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "user": "root"} 2024-06-24T10:03:36.815Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "user": "root"} 2024-06-24T10:03:36.825Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "secret": "some-name-mysql-init", "user": "root"} 2024-06-24T10:03:42.699Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871"} 2024-06-24T10:03:42.709Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "user": "root"} 2024-06-24T10:03:42.710Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "user": "operator"} 2024-06-24T10:03:42.738Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "user": "operator"} 2024-06-24T10:03:42.751Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-24T10:03:42.763Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "user": "operator"} 2024-06-24T10:03:42.763Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "user": "monitor"} 2024-06-24T10:03:42.789Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "user": "monitor"} 2024-06-24T10:03:42.799Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-24T10:03:42.855Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "user": "monitor"} 2024-06-24T10:03:42.868Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "user": "monitor"} 2024-06-24T10:03:42.869Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "user": "xtrabackup"} 2024-06-24T10:03:42.907Z INFO User password updated {"controller": "pxc-controller", 
"namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "user": "xtrabackup"} 2024-06-24T10:03:42.927Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-24T10:03:42.943Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "user": "xtrabackup"} 2024-06-24T10:03:42.943Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "user": "replication"} 2024-06-24T10:03:42.975Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "user": "replication"} 2024-06-24T10:03:42.991Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "secret": "some-name-mysql-init", "user": "replication"} 2024-06-24T10:03:43.006Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "user": "replication"} 2024-06-24T10:03:43.006Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "user": "proxyadmin"} 2024-06-24T10:03:43.060Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "user": "proxyadmin"} 2024-06-24T10:03:43.070Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "user": "proxyadmin"} 2024-06-24T10:03:43.070Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "last-applied-secret": "afb8d7b2b2cedfb6463636cf6836e91e67d0455aeeb08e63dca4f5fe0ac0ff1e"} 2024-06-24T10:03:43.070Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "last-applied-secret": "afb8d7b2b2cedfb6463636cf6836e91e67d0455aeeb08e63dca4f5fe0ac0ff1e"} 2024-06-24T10:03:43.545Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "baea978d-a6a5-4349-a9d3-d9024b6dd871", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-24T10:05:31.817Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "364fc46a-3664-4f85-8772-1818125cb4f8", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-28617 on 10.178.64.10:53: no such host"} 2024-06-24T10:05:37.152Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "198e2793-0508-4066-8a88-d6d24e6c37b6", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-28617 on 10.178.64.10:53: no such host"} 2024-06-24T10:05:42.371Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "c4daadd6-a76a-45f3-8b25-4944234e645c", "primary name": "some-name-pxc-0.some-name-pxc.users-28617.svc.cluster.local"} 2024-06-24T10:05:47.610Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "e86d26c3-1db9-4ccd-b5c7-ac6b82c6f382", "primary name": "some-name-pxc-0.some-name-pxc.users-28617.svc.cluster.local"} 2024-06-24T10:05:52.933Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "fc9c01b9-3c30-40bc-ac78-914d3623b29d", "primary name": "some-name-pxc-0.some-name-pxc.users-28617.svc.cluster.local"} 2024-06-24T10:05:58.131Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "a95586e6-9bf4-40ef-abe1-8526b9ba7018", "primary name": "some-name-pxc-0.some-name-pxc.users-28617.svc.cluster.local"} 2024-06-24T10:06:03.343Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "2fd6268b-a29b-4a41-a2d7-9d558a5bdcfc", "primary name": "some-name-pxc-0.some-name-pxc.users-28617.svc.cluster.local"} 2024-06-24T10:06:08.552Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "fdc3758b-d2ab-4a6b-b45f-a2ea414f871e", "primary name": "some-name-pxc-0.some-name-pxc.users-28617.svc.cluster.local"} 2024-06-24T10:06:14.518Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "16f250a1-d73d-4b5e-a546-f9db31e874dd", "primary name": "some-name-pxc-0.some-name-pxc.users-28617.svc.cluster.local"} 2024-06-24T10:06:23.671Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "77adf2bc-8de1-42ea-9efe-c8c441969edb"} 2024-06-24T10:06:28.761Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "7ba7fdaa-e2a0-4d14-ac33-2a501dc29054"} 2024-06-24T10:06:34.045Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "678efd0f-e74d-42a0-b139-690104c38ee5"} 2024-06-24T10:06:39.960Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "1b5a8066-416f-450a-8bf1-2d170ef82995"} 2024-06-24T10:06:41.417Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "6f620d50-7806-40b6-8e22-809189340be9", "user": "operator"} 2024-06-24T10:06:41.447Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "6f620d50-7806-40b6-8e22-809189340be9", "user": "operator"} 2024-06-24T10:06:41.485Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "6f620d50-7806-40b6-8e22-809189340be9", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-24T10:06:41.540Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "6f620d50-7806-40b6-8e22-809189340be9", "user": "operator"} 2024-06-24T10:06:41.540Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "6f620d50-7806-40b6-8e22-809189340be9", "last-applied-secret": "acfa07f19cd5a142357ae6ddc2fafc0775522d58a9dc0e8e0782e41b838a4d70"} 2024-06-24T10:06:43.228Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "6f620d50-7806-40b6-8e22-809189340be9", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28617.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28617.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28617.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28617.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28617.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-28617.svc.' 
(using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-24T10:07:24.831Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "aebb602d-b3a7-416d-9594-e1a282eacef7"} 2024-06-24T10:07:33.470Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "288baf93-e2bd-4983-afb0-c5851a9fdd33"} 2024-06-24T10:07:39.079Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "87fe08a8-21b5-4c23-ac08-d47d4e412b30"} 2024-06-24T10:07:44.272Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "779f7606-7b14-4b71-8f24-c11938723160"} 2024-06-24T10:07:49.942Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "0e8e40da-be86-49ba-a231-da3a92254e17"} 2024-06-24T10:07:56.523Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "631da920-dcc3-4912-8d66-30757337bbe5"} 2024-06-24T10:08:05.473Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "0ddd2af9-cefb-4446-bd06-ae610da419a9"} 2024-06-24T10:08:11.230Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "e78667db-5484-4b7f-9827-5965309c86dc"} 2024-06-24T10:08:16.263Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "1c3320c3-6daa-44cf-b0fc-96d763a58725"} 2024-06-24T10:08:21.352Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "d2428890-a4c6-4216-bf75-5456b9c847fc"} 2024-06-24T10:08:26.676Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "9c25e6c5-0f70-4d64-bd70-2534d32e8583"} 2024-06-24T10:08:32.451Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "1a3f65e0-4470-42af-b004-15d5e5675e75"} 2024-06-24T10:08:37.673Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "1d57bb4b-6bcb-41f8-a53e-23fe39f78281"} 2024-06-24T10:08:39.399Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "731705d8-7153-429e-97bb-909e07909e36", "user": "root"} 2024-06-24T10:08:39.438Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", 
"reconcileID": "731705d8-7153-429e-97bb-909e07909e36", "user": "root"} 2024-06-24T10:08:39.448Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "731705d8-7153-429e-97bb-909e07909e36", "secret": "some-name-mysql-init", "user": "root"} 2024-06-24T10:08:44.928Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "731705d8-7153-429e-97bb-909e07909e36"} 2024-06-24T10:08:44.938Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "731705d8-7153-429e-97bb-909e07909e36", "user": "root"} 2024-06-24T10:08:44.938Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "731705d8-7153-429e-97bb-909e07909e36", "user": "monitor"} 2024-06-24T10:08:44.964Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "731705d8-7153-429e-97bb-909e07909e36", "user": "monitor"} 2024-06-24T10:08:44.975Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "731705d8-7153-429e-97bb-909e07909e36", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-24T10:08:45.024Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "731705d8-7153-429e-97bb-909e07909e36", "user": "monitor"} 2024-06-24T10:08:45.042Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "731705d8-7153-429e-97bb-909e07909e36", "user": "monitor"} 2024-06-24T10:08:45.042Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "731705d8-7153-429e-97bb-909e07909e36", "user": "xtrabackup"} 2024-06-24T10:08:45.067Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "731705d8-7153-429e-97bb-909e07909e36", "user": "xtrabackup"} 2024-06-24T10:08:45.079Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "731705d8-7153-429e-97bb-909e07909e36", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-24T10:08:45.123Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "731705d8-7153-429e-97bb-909e07909e36", "user": "xtrabackup"} 2024-06-24T10:08:45.123Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "731705d8-7153-429e-97bb-909e07909e36", "user": "proxyadmin"} 2024-06-24T10:08:45.176Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "731705d8-7153-429e-97bb-909e07909e36", "user": "proxyadmin"} 2024-06-24T10:08:45.219Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "731705d8-7153-429e-97bb-909e07909e36", "user": "proxyadmin"} 2024-06-24T10:08:45.219Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "731705d8-7153-429e-97bb-909e07909e36", 
"last-applied-secret": "96eab325fe6260940415d6f355ba897524b50d4c44b2dd17e6f4f09d403e3e41"} 2024-06-24T10:08:45.219Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "731705d8-7153-429e-97bb-909e07909e36", "last-applied-secret": "96eab325fe6260940415d6f355ba897524b50d4c44b2dd17e6f4f09d403e3e41"} 2024-06-24T10:08:45.529Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "731705d8-7153-429e-97bb-909e07909e36", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-24T10:09:01.901Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: 947b580f-ccc3-4a59-bc97-6754a3a6bc06 2024-06-24T10:12:25.438Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "55a086ef-d447-4234-af2d-4fdb713a7ee9", "user": "root"} 2024-06-24T10:12:25.477Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "55a086ef-d447-4234-af2d-4fdb713a7ee9", "user": "root"} 2024-06-24T10:12:25.487Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "55a086ef-d447-4234-af2d-4fdb713a7ee9", "secret": "some-name-mysql-init", "user": "root"} 2024-06-24T10:12:25.499Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "55a086ef-d447-4234-af2d-4fdb713a7ee9", "user": "root"} 2024-06-24T10:12:25.499Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "55a086ef-d447-4234-af2d-4fdb713a7ee9", "user": "operator"} 2024-06-24T10:12:25.526Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "55a086ef-d447-4234-af2d-4fdb713a7ee9", "user": "operator"} 2024-06-24T10:12:25.536Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": 
"55a086ef-d447-4234-af2d-4fdb713a7ee9", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-24T10:12:25.546Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "55a086ef-d447-4234-af2d-4fdb713a7ee9", "user": "operator"} 2024-06-24T10:12:25.546Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "55a086ef-d447-4234-af2d-4fdb713a7ee9", "user": "monitor"} 2024-06-24T10:12:25.574Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "55a086ef-d447-4234-af2d-4fdb713a7ee9", "user": "monitor"} 2024-06-24T10:12:25.586Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "55a086ef-d447-4234-af2d-4fdb713a7ee9", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-24T10:12:25.595Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "55a086ef-d447-4234-af2d-4fdb713a7ee9", "user": "monitor"} 2024-06-24T10:12:25.595Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "55a086ef-d447-4234-af2d-4fdb713a7ee9", "user": "xtrabackup"} 2024-06-24T10:12:25.618Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "55a086ef-d447-4234-af2d-4fdb713a7ee9", "user": "xtrabackup"} 2024-06-24T10:12:25.631Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "55a086ef-d447-4234-af2d-4fdb713a7ee9", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-24T10:12:25.641Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "55a086ef-d447-4234-af2d-4fdb713a7ee9", "user": "xtrabackup"} 2024-06-24T10:12:25.641Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "55a086ef-d447-4234-af2d-4fdb713a7ee9", "user": "replication"} 2024-06-24T10:12:25.675Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "55a086ef-d447-4234-af2d-4fdb713a7ee9", "user": "replication"} 2024-06-24T10:12:25.689Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "55a086ef-d447-4234-af2d-4fdb713a7ee9", "secret": "some-name-mysql-init", "user": "replication"} 2024-06-24T10:12:25.700Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "55a086ef-d447-4234-af2d-4fdb713a7ee9", "user": "replication"} 2024-06-24T10:12:25.700Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "55a086ef-d447-4234-af2d-4fdb713a7ee9", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-06-24T10:12:25.700Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "55a086ef-d447-4234-af2d-4fdb713a7ee9", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 
2024-06-24T10:14:06.682Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "7aa783e8-f651-46f5-9eef-2a11f6c8f13f", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-28617 on 10.178.64.10:53: no such host"}
2024-06-24T10:15:16.031Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "fd25e7bc-93ad-4c19-9bdc-fb5965309e7f", "user": "monitor"}
2024-06-24T10:15:16.056Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "fd25e7bc-93ad-4c19-9bdc-fb5965309e7f", "user": "monitor"}
2024-06-24T10:15:16.081Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "fd25e7bc-93ad-4c19-9bdc-fb5965309e7f", "secret": "some-name-mysql-init", "user": "monitor"}
2024-06-24T10:15:16.112Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "fd25e7bc-93ad-4c19-9bdc-fb5965309e7f", "user": "monitor"}
2024-06-24T10:15:16.112Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-28617", "name": "some-name", "reconcileID": "fd25e7bc-93ad-4c19-9bdc-fb5965309e7f", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"}
github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1
	/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1248
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler
	/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:324
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem
	/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:261
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2
	/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:222
[mysql] 2024/06/24 10:15:28 packets.go:37: unexpected EOF
[mysql] 2024/06/24 10:15:29 packets.go:37: unexpected EOF
[mysql] 2024/06/24 10:15:30 packets.go:37: unexpected EOF
+ kubectl get pxc --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl patch pxc -n users-28617 some-name --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.3dFQtCmWKi
++ mktemp
+ local LAST_ERR=/tmp/tmp.JPvm8J4axh
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.3dFQtCmWKi
perconaxtradbcluster.pxc.percona.com "some-name" deleted
+ cat /tmp/tmp.JPvm8J4axh
+ rm /tmp/tmp.3dFQtCmWKi /tmp/tmp.JPvm8J4axh
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.hgVBL9X4n3
++ mktemp
+ local LAST_ERR=/tmp/tmp.1XJIB7mYVy
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.hgVBL9X4n3
No resources found
+ cat /tmp/tmp.1XJIB7mYVy
+ rm /tmp/tmp.hgVBL9X4n3 /tmp/tmp.1XJIB7mYVy
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.Va5OdbQCbV
++ mktemp
+ local LAST_ERR=/tmp/tmp.AFpyjmqC35
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.Va5OdbQCbV
No resources found
+ cat /tmp/tmp.AFpyjmqC35
+ rm /tmp/tmp.Va5OdbQCbV /tmp/tmp.AFpyjmqC35
+ return 0
+ kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
++ mktemp
+ local LAST_OUT=/tmp/tmp.OFJiZT8ZZr
++ mktemp
+ local LAST_ERR=/tmp/tmp.Xoi73va6Hy
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.OFJiZT8ZZr
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted
+ cat /tmp/tmp.Xoi73va6Hy
+ rm /tmp/tmp.OFJiZT8ZZr /tmp/tmp.Xoi73va6Hy
+ return 0
+ kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml
+ :
+ '[' '!' -z '' ']'
+ '[' -n pxc-operator ']'
+ kubectl_bin delete --grace-period=0 --force=true namespace users-28617
+ rm -rf /tmp/tmp.oaVtWV5LhP
+ kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator
++ mktemp
++ mktemp
+ local LAST_OUT=/tmp/tmp.FIi7Rm3RxI
+ desc 'test passed'
+ set +o xtrace
+ local LAST_OUT=/tmp/tmp.sBXVtTYGvA
-----------------------------------------------------------------------------------
test passed
-----------------------------------------------------------------------------------
++ mktemp
++ mktemp
+ local LAST_ERR=/tmp/tmp.M84eQLQ1RI
+ local exit_status=0
++ seq 0 2
+ local LAST_ERR=/tmp/tmp.FBSNuu0RgA
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace users-28617
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace pxc-operator