Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/logs/users-5-7.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + create_infra users-22051 + local ns=users-22051 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE + kubectl patch pxc -n users-19107 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.HInR2wnzTs ++ mktemp + local LAST_ERR=/tmp/tmp.U26B1ICFFV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HInR2wnzTs perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.U26B1ICFFV + rm /tmp/tmp.HInR2wnzTs /tmp/tmp.U26B1ICFFV + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.tqcQqz7lyq ++ mktemp + local LAST_ERR=/tmp/tmp.Wui2sW89XU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.tqcQqz7lyq No resources found + cat /tmp/tmp.Wui2sW89XU + rm /tmp/tmp.tqcQqz7lyq /tmp/tmp.Wui2sW89XU + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.hQfGOSCwtm ++ mktemp + local LAST_ERR=/tmp/tmp.1vL3AcqgXn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hQfGOSCwtm No resources found + cat /tmp/tmp.1vL3AcqgXn + rm /tmp/tmp.hQfGOSCwtm /tmp/tmp.1vL3AcqgXn + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ grep validate-auth ++ kubectl get ValidatingWebhookConfiguration + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ kubectl api-resources ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ kubectl get clusterrole ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified 
+ : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator ++ mktemp + xargs kubectl delete ns + kubectl_bin get ns ++ mktemp + local LAST_OUT=/tmp/tmp.2V37XA0N29 ++ mktemp + local LAST_ERR=/tmp/tmp.yavXsFxWNf + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + local LAST_OUT=/tmp/tmp.mqj33OyPi3 + awk '{print$1}' ++ mktemp + local LAST_ERR=/tmp/tmp.i3RtYQBMDy + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2V37XA0N29 + cat /tmp/tmp.yavXsFxWNf + rm /tmp/tmp.2V37XA0N29 /tmp/tmp.yavXsFxWNf + return 0 namespace "users-19107" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.mqj33OyPi3 namespace "pxc-operator" deleted + cat /tmp/tmp.i3RtYQBMDy + rm /tmp/tmp.mqj33OyPi3 /tmp/tmp.i3RtYQBMDy + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.3ExAAqIbFD ++ mktemp + local LAST_ERR=/tmp/tmp.imv8hGD79O + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3ExAAqIbFD namespace/pxc-operator created + cat /tmp/tmp.imv8hGD79O + rm /tmp/tmp.3ExAAqIbFD /tmp/tmp.imv8hGD79O + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.NaHCXcgKZJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.chNNdEDI35 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NaHCXcgKZJ ++ cat /tmp/tmp.chNNdEDI35 ++ rm /tmp/tmp.NaHCXcgKZJ /tmp/tmp.chNNdEDI35 ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1752-44f0e1a8-4-cluster3 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.aZtn5DEWXf ++ mktemp + local LAST_ERR=/tmp/tmp.bxehQuy1aJ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1752-44f0e1a8-4-cluster3 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.aZtn5DEWXf Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1752-44f0e1a8-4-cluster3" modified. 
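Every kubectl invocation in this trace goes through the same kubectl_bin wrapper: stdout and stderr are captured to mktemp files, the command is retried up to three times with errexit toggled off around each attempt, and the captured streams are replayed and cleaned up afterwards. A minimal sketch of that pattern, reconstructed from the trace rather than from the framework source, so treat the details as illustrative:

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 i
    LAST_OUT=$(mktemp)            # per-call stdout capture, e.g. /tmp/tmp.XXXXXX
    LAST_ERR=$(mktemp)            # per-call stderr capture
    for i in $(seq 0 2); do       # three attempts, matching 'seq 0 2' in the trace
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ "$exit_status" -eq 0 ]; then
            break
        fi
        sleep 0                   # the trace shows no real backoff between attempts
    done
    cat "$LAST_OUT"               # replay captured output after the retry loop
    cat "$LAST_ERR"
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}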
+ cat /tmp/tmp.bxehQuy1aJ + rm /tmp/tmp.aZtn5DEWXf /tmp/tmp.bxehQuy1aJ + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.Fo5bAGlOql ++ mktemp + local LAST_ERR=/tmp/tmp.4vcJkt46xM + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Fo5bAGlOql customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.4vcJkt46xM + rm /tmp/tmp.Fo5bAGlOql /tmp/tmp.4vcJkt46xM + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.FRVdgo5cDC ++ mktemp + local LAST_ERR=/tmp/tmp.zKKAkNlsDz + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.FRVdgo5cDC clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.zKKAkNlsDz + rm /tmp/tmp.FRVdgo5cDC /tmp/tmp.zKKAkNlsDz + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/deploy/cw-operator.yaml + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1752-44f0e1a8^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + kubectl_bin apply -f - + sed -e 's^failureThreshold: .*^failureThreshold: 10^' ++ mktemp + local LAST_OUT=/tmp/tmp.65jKGU0P4g ++ mktemp + local LAST_ERR=/tmp/tmp.ebiMdZfmWX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.65jKGU0P4g deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.ebiMdZfmWX + rm /tmp/tmp.65jKGU0P4g /tmp/tmp.ebiMdZfmWX + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.wvoUC3Zi4S ++ mktemp + local LAST_ERR=/tmp/tmp.bjXAOooJhd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.wvoUC3Zi4S pod/percona-xtradb-cluster-operator-6dbfd9bb9d-8vkvm condition met + cat /tmp/tmp.bjXAOooJhd + rm /tmp/tmp.wvoUC3Zi4S /tmp/tmp.bjXAOooJhd + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.fiSTpXcrFg +++ mktemp ++ local LAST_ERR=/tmp/tmp.FLLqJeDgmX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fiSTpXcrFg ++ cat /tmp/tmp.FLLqJeDgmX ++ rm /tmp/tmp.fiSTpXcrFg /tmp/tmp.FLLqJeDgmX ++ return 0 + wait_pod percona-xtradb-cluster-operator-6dbfd9bb9d-8vkvm 480 pxc-operator + local pod=percona-xtradb-cluster-operator-6dbfd9bb9d-8vkvm + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-6dbfd9bb9d-8vkvm ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-6dbfd9bb9d-8vkvm condition met percona-xtradb-cluster-operator-6dbfd9bb9d-8vkvm.Ok + sleep 3 + create_namespace users-22051 + local namespace=users-22051 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v 
'^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + '[' -n '' ']' + desc 'cleaned up old namespaces users-22051' ++ mktemp + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-22051 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-22051 + xargs kubectl delete ns ++ mktemp + local LAST_OUT=/tmp/tmp.5BHvsX0qKC + awk '{print$1}' + local LAST_OUT=/tmp/tmp.QOwgD9aGkO ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.FH2vtLIlCi + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.KKzV4M2L7D + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-22051 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-22051 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5BHvsX0qKC + cat /tmp/tmp.KKzV4M2L7D + rm /tmp/tmp.5BHvsX0qKC /tmp/tmp.KKzV4M2L7D + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-22051 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.QOwgD9aGkO + cat /tmp/tmp.FH2vtLIlCi Error from server (NotFound): namespaces "users-22051" not found + rm /tmp/tmp.QOwgD9aGkO /tmp/tmp.FH2vtLIlCi + return 1 + : + wait_for_delete namespace/users-22051 + local res=namespace/users-22051 + echo -n 'namespace/users-22051 - ' namespace/users-22051 - + set +o xtrace Error from server (NotFound): namespaces "users-22051" not found + desc 'create namespace users-22051' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-22051 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-22051 ++ mktemp + local LAST_OUT=/tmp/tmp.N1oyvYmn81 ++ mktemp + local LAST_ERR=/tmp/tmp.tDagwFczcd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-22051 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.N1oyvYmn81 namespace/users-22051 created + cat /tmp/tmp.tDagwFczcd + rm /tmp/tmp.N1oyvYmn81 /tmp/tmp.tDagwFczcd + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.JPIsBa4JRq +++ mktemp ++ local LAST_ERR=/tmp/tmp.hXRiXXT3Y2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JPIsBa4JRq ++ cat /tmp/tmp.hXRiXXT3Y2 ++ rm /tmp/tmp.JPIsBa4JRq /tmp/tmp.hXRiXXT3Y2 ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1752-44f0e1a8-4-cluster3 --namespace=users-22051 ++ mktemp + local LAST_OUT=/tmp/tmp.cqeDNTeT4Y ++ mktemp + local LAST_ERR=/tmp/tmp.8HPu8aD8R1 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1752-44f0e1a8-4-cluster3 --namespace=users-22051 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.cqeDNTeT4Y Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1752-44f0e1a8-4-cluster3" modified. 
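Both create_namespace calls in this log (pxc-operator above, users-22051 here) follow the same lifecycle: delete any leftover namespace, wait until the API server reports NotFound, create it fresh, and point the current kubeconfig context at it. A condensed sketch of that sequence, under the assumption that wait_for_delete simply polls kubectl get, which is what the NotFound lines in the trace suggest:

ns=users-22051
kubectl delete namespace "$ns" || :    # first attempt may fail with NotFound; the test tolerates it
# wait_for_delete: poll until the namespace is actually gone
while kubectl get "namespace/$ns" >/dev/null 2>&1; do
    sleep 1
done
kubectl create namespace "$ns"
# switch the active context into the new namespace, as set-context does in the trace
kubectl config set-context "$(kubectl config current-context)" --namespace="$ns"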
+ cat /tmp/tmp.8HPu8aD8R1 + rm /tmp/tmp.cqeDNTeT4Y /tmp/tmp.8HPu8aD8R1 + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.KWAb5hobq4 ++ mktemp + local LAST_ERR=/tmp/tmp.NgLQWyaQQj + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.KWAb5hobq4 secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.NgLQWyaQQj + rm /tmp/tmp.KWAb5hobq4 /tmp/tmp.NgLQWyaQQj + return 0 + desc 'create PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.ojT4eEsYao ++ mktemp + local LAST_ERR=/tmp/tmp.re91eOcI1K + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ojT4eEsYao secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.re91eOcI1K + rm /tmp/tmp.ojT4eEsYao /tmp/tmp.re91eOcI1K + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ mktemp + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_OUT=/tmp/tmp.shvOS65kIw + 
/usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1752-44f0e1a8#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-22051~ + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ mktemp + local LAST_ERR=/tmp/tmp.nK5nR12Cnh + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.shvOS65kIw deployment.apps/pxc-client created + cat /tmp/tmp.nK5nR12Cnh + rm /tmp/tmp.shvOS65kIw /tmp/tmp.nK5nR12Cnh + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1752-44f0e1a8#' + local LAST_OUT=/tmp/tmp.pweRlEVAo9 + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ mktemp + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-22051~ + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_ERR=/tmp/tmp.bXsutCCZIV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.pweRlEVAo9 perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.bXsutCCZIV + rm /tmp/tmp.pweRlEVAo9 /tmp/tmp.bXsutCCZIV + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.8c44k0i0zF ++++ mktemp +++ local LAST_ERR=/tmp/tmp.uiooC4yS4G +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' 
+++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.8c44k0i0zF +++ cat /tmp/tmp.uiooC4yS4G +++ rm /tmp/tmp.8c44k0i0zF /tmp/tmp.uiooC4yS4G +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.pqnsYd8WP9 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.Q0u51ApPP8 +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.pqnsYd8WP9 +++ cat /tmp/tmp.Q0u51ApPP8 +++ rm /tmp/tmp.pqnsYd8WP9 /tmp/tmp.Q0u51ApPP8 +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-22051 ++ mktemp + local LAST_OUT=/tmp/tmp.J58fLPwWuj ++ mktemp + local LAST_ERR=/tmp/tmp.hxYHe0s95G + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-22051 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-22051 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-22051 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.J58fLPwWuj + cat /tmp/tmp.hxYHe0s95G error: no matching resources found + rm /tmp/tmp.J58fLPwWuj /tmp/tmp.hxYHe0s95G + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met some-name-pxc-0.Ok + for i 
in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo some-name-pxc-2 + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CbqWWgDSWN +++ mktemp ++ local LAST_ERR=/tmp/tmp.e6J6ExLxw0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CbqWWgDSWN ++ cat /tmp/tmp.e6J6ExLxw0 Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.CbqWWgDSWN /tmp/tmp.e6J6ExLxw0 ++ return 0 + [[ -n '' ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QwTHBCrok7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.09PS6UVD8R ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QwTHBCrok7 ++ cat /tmp/tmp.09PS6UVD8R ++ rm /tmp/tmp.QwTHBCrok7 /tmp/tmp.09PS6UVD8R ++ return 0 + client_pod=pxc-client-64b479df95-499mv + wait_pod pxc-client-64b479df95-499mv + local pod=pxc-client-64b479df95-499mv + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-499mv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-499mv condition met pxc-client-64b479df95-499mv.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EgZrVNtDKF +++ mktemp ++ local LAST_ERR=/tmp/tmp.4tNebVv1f4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' 
++ break ++ cat /tmp/tmp.EgZrVNtDKF ++ cat /tmp/tmp.4tNebVv1f4 ++ rm /tmp/tmp.EgZrVNtDKF /tmp/tmp.4tNebVv1f4 ++ return 0 + client_pod=pxc-client-64b479df95-499mv + wait_pod pxc-client-64b479df95-499mv + local pod=pxc-client-64b479df95-499mv + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-499mv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-499mv condition met pxc-client-64b479df95-499mv.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nREzQqDPKX +++ mktemp ++ local LAST_ERR=/tmp/tmp.M7xTjgOQKh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nREzQqDPKX ++ cat /tmp/tmp.M7xTjgOQKh ++ rm /tmp/tmp.nREzQqDPKX /tmp/tmp.M7xTjgOQKh ++ return 0 + client_pod=pxc-client-64b479df95-499mv + wait_pod pxc-client-64b479df95-499mv + local pod=pxc-client-64b479df95-499mv + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-499mv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-499mv condition met pxc-client-64b479df95-499mv.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.mo3cXZH4Te/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-1.sql /tmp/tmp.mo3cXZH4Te/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WqDM1x2rS9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.3WGfF3DQPD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WqDM1x2rS9 ++ cat /tmp/tmp.3WGfF3DQPD ++ rm /tmp/tmp.WqDM1x2rS9 /tmp/tmp.3WGfF3DQPD ++ return 0 + client_pod=pxc-client-64b479df95-499mv + wait_pod pxc-client-64b479df95-499mv + local pod=pxc-client-64b479df95-499mv + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-499mv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-499mv condition met pxc-client-64b479df95-499mv.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.mo3cXZH4Te/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-1.sql /tmp/tmp.mo3cXZH4Te/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZL7OqqNCXb +++ mktemp ++ local LAST_ERR=/tmp/tmp.00grXKpkRw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZL7OqqNCXb ++ cat /tmp/tmp.00grXKpkRw ++ rm /tmp/tmp.ZL7OqqNCXb /tmp/tmp.00grXKpkRw ++ return 0 + client_pod=pxc-client-64b479df95-499mv + wait_pod pxc-client-64b479df95-499mv + local pod=pxc-client-64b479df95-499mv + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-499mv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-499mv condition met pxc-client-64b479df95-499mv.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.mo3cXZH4Te/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-1.sql /tmp/tmp.mo3cXZH4Te/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GPNhZhsiyO +++ mktemp ++ local LAST_ERR=/tmp/tmp.pN0WqKHBHG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GPNhZhsiyO ++ cat /tmp/tmp.pN0WqKHBHG Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.GPNhZhsiyO /tmp/tmp.pN0WqKHBHG ++ return 0 + '[' '' ']' + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.a8MomyNwFo ++ mktemp + local LAST_ERR=/tmp/tmp.HtvnFwVbNN + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.a8MomyNwFo secret/my-cluster-secrets patched + cat /tmp/tmp.HtvnFwVbNN + rm /tmp/tmp.a8MomyNwFo /tmp/tmp.HtvnFwVbNN + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.emEqhQswHI +++ mktemp ++ local LAST_ERR=/tmp/tmp.TE6SVNW7Lg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.emEqhQswHI ++ cat /tmp/tmp.TE6SVNW7Lg ++ rm /tmp/tmp.emEqhQswHI /tmp/tmp.TE6SVNW7Lg ++ return 0 + client_pod=pxc-client-64b479df95-499mv + wait_pod pxc-client-64b479df95-499mv + local pod=pxc-client-64b479df95-499mv + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-499mv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-499mv condition met pxc-client-64b479df95-499mv.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.mo3cXZH4Te/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4.sql /tmp/tmp.mo3cXZH4Te/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.5zADERFDR4 ++ mktemp + local LAST_ERR=/tmp/tmp.vo5ZXpi9z0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5zADERFDR4 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.vo5ZXpi9z0 + rm /tmp/tmp.5zADERFDR4 /tmp/tmp.vo5ZXpi9z0 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zPZBQlmCub +++ mktemp ++ local LAST_ERR=/tmp/tmp.T4siIWZakK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zPZBQlmCub ++ cat /tmp/tmp.T4siIWZakK ++ rm /tmp/tmp.zPZBQlmCub /tmp/tmp.T4siIWZakK ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lrSyRe4nHK +++ mktemp ++ local LAST_ERR=/tmp/tmp.ObzFM82SaU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lrSyRe4nHK ++ cat /tmp/tmp.ObzFM82SaU ++ rm /tmp/tmp.lrSyRe4nHK /tmp/tmp.ObzFM82SaU ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.MGEWk5gNGV ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.dGZX5DYHLU +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.MGEWk5gNGV +++++ cat /tmp/tmp.dGZX5DYHLU +++++ rm /tmp/tmp.MGEWk5gNGV /tmp/tmp.dGZX5DYHLU +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.JKPybz7xoJ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.4mJU1ttmTi +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.JKPybz7xoJ +++++ cat /tmp/tmp.4mJU1ttmTi +++++ rm /tmp/tmp.JKPybz7xoJ /tmp/tmp.4mJU1ttmTi +++++ return 0 ++++ [[ true == 
\t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3OKwrso5Ia +++ mktemp ++ local LAST_ERR=/tmp/tmp.Xd8pN3piAq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3OKwrso5Ia ++ cat /tmp/tmp.Xd8pN3piAq ++ rm /tmp/tmp.3OKwrso5Ia /tmp/tmp.Xd8pN3piAq ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ZjqzO0S22C ++ mktemp + local LAST_ERR=/tmp/tmp.Knuuo30yQ3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ZjqzO0S22C secret/my-cluster-secrets patched + cat /tmp/tmp.Knuuo30yQ3 + rm /tmp/tmp.ZjqzO0S22C /tmp/tmp.Knuuo30yQ3 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2ABku2fg7c +++ mktemp ++ local LAST_ERR=/tmp/tmp.U2VOI7jDUQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2ABku2fg7c ++ cat /tmp/tmp.U2VOI7jDUQ ++ rm /tmp/tmp.2ABku2fg7c /tmp/tmp.U2VOI7jDUQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4b3xBWKnHO +++ mktemp ++ local LAST_ERR=/tmp/tmp.ylQpaBKRQr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4b3xBWKnHO ++ cat /tmp/tmp.ylQpaBKRQr ++ rm /tmp/tmp.4b3xBWKnHO /tmp/tmp.ylQpaBKRQr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YP6xhUa3uG +++ mktemp ++ local LAST_ERR=/tmp/tmp.9hVbLtUMPB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YP6xhUa3uG ++ cat /tmp/tmp.9hVbLtUMPB ++ rm /tmp/tmp.YP6xhUa3uG /tmp/tmp.9hVbLtUMPB ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cvJ2Azh5DN +++ mktemp ++ 
local LAST_ERR=/tmp/tmp.Lgy4HxuCUC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cvJ2Azh5DN ++ cat /tmp/tmp.Lgy4HxuCUC ++ rm /tmp/tmp.cvJ2Azh5DN /tmp/tmp.Lgy4HxuCUC ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.L6yXFlFSd6 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.THtFz0aFf6 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.L6yXFlFSd6 +++++ cat /tmp/tmp.THtFz0aFf6 +++++ rm /tmp/tmp.L6yXFlFSd6 /tmp/tmp.THtFz0aFf6 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.PZF0alVFL2 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.43u1jg9HNk +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.PZF0alVFL2 +++++ cat /tmp/tmp.43u1jg9HNk +++++ rm /tmp/tmp.PZF0alVFL2 /tmp/tmp.43u1jg9HNk +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0lVKn3sAEY +++ mktemp ++ local LAST_ERR=/tmp/tmp.iqf5xtyy92 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0lVKn3sAEY ++ cat /tmp/tmp.iqf5xtyy92 ++ rm /tmp/tmp.0lVKn3sAEY /tmp/tmp.iqf5xtyy92 ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.mo3cXZH4Te/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-2.sql /tmp/tmp.mo3cXZH4Te/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.mo3cXZH4Te/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-2.sql /tmp/tmp.mo3cXZH4Te/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.mo3cXZH4Te/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-2.sql /tmp/tmp.mo3cXZH4Te/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.1otXpaTyUa ++ mktemp + local LAST_ERR=/tmp/tmp.U7OilGOHMy + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1otXpaTyUa perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.U7OilGOHMy + rm /tmp/tmp.1otXpaTyUa /tmp/tmp.U7OilGOHMy + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.jCGrlGeITB ++ mktemp + local LAST_ERR=/tmp/tmp.SSuQpPFcHq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jCGrlGeITB secret/my-cluster-secrets patched + cat /tmp/tmp.SSuQpPFcHq + rm /tmp/tmp.jCGrlGeITB /tmp/tmp.SSuQpPFcHq + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9UBjT4ntV9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.zmaBIatTSf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9UBjT4ntV9 ++ cat /tmp/tmp.zmaBIatTSf ++ rm /tmp/tmp.9UBjT4ntV9 /tmp/tmp.zmaBIatTSf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FFXdoPMApZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.fwkMA9AEJZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FFXdoPMApZ ++ cat /tmp/tmp.fwkMA9AEJZ ++ rm /tmp/tmp.FFXdoPMApZ /tmp/tmp.fwkMA9AEJZ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jmeuvxxIwW +++ mktemp ++ local LAST_ERR=/tmp/tmp.Bqc6opxDs3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 
'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jmeuvxxIwW ++ cat /tmp/tmp.Bqc6opxDs3 ++ rm /tmp/tmp.jmeuvxxIwW /tmp/tmp.Bqc6opxDs3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EBPH6oqzU8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.rjjRCpnzb4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EBPH6oqzU8 ++ cat /tmp/tmp.rjjRCpnzb4 ++ rm /tmp/tmp.EBPH6oqzU8 /tmp/tmp.rjjRCpnzb4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qizlSGBAv4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Xda3nQiHwB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qizlSGBAv4 ++ cat /tmp/tmp.Xda3nQiHwB ++ rm /tmp/tmp.qizlSGBAv4 /tmp/tmp.Xda3nQiHwB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Y7UBbizPsO +++ mktemp ++ local LAST_ERR=/tmp/tmp.s3szyrEDGW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Y7UBbizPsO ++ cat /tmp/tmp.s3szyrEDGW ++ rm /tmp/tmp.Y7UBbizPsO /tmp/tmp.s3szyrEDGW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Z7vWyxZEAY +++ mktemp ++ local LAST_ERR=/tmp/tmp.6PmApXpFn5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Z7vWyxZEAY ++ cat /tmp/tmp.6PmApXpFn5 ++ rm /tmp/tmp.Z7vWyxZEAY /tmp/tmp.6PmApXpFn5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lmubWmLJPH +++ mktemp ++ local LAST_ERR=/tmp/tmp.f1HYLfkkLL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lmubWmLJPH ++ cat /tmp/tmp.f1HYLfkkLL ++ rm /tmp/tmp.lmubWmLJPH /tmp/tmp.f1HYLfkkLL ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sxlMNHofht +++ mktemp ++ local LAST_ERR=/tmp/tmp.RuXr4nmQHx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e 
++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sxlMNHofht ++ cat /tmp/tmp.RuXr4nmQHx ++ rm /tmp/tmp.sxlMNHofht /tmp/tmp.RuXr4nmQHx ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.RStpDs5w47 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Agzit4g2gA +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.RStpDs5w47 +++++ cat /tmp/tmp.Agzit4g2gA +++++ rm /tmp/tmp.RStpDs5w47 /tmp/tmp.Agzit4g2gA +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.KGnXIG6rSN ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.TRmB2HldNl +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.KGnXIG6rSN +++++ cat /tmp/tmp.TRmB2HldNl +++++ rm /tmp/tmp.KGnXIG6rSN /tmp/tmp.TRmB2HldNl +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PmhrcgNyZN +++ mktemp ++ local LAST_ERR=/tmp/tmp.6nO7k8w5zr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PmhrcgNyZN ++ cat /tmp/tmp.6nO7k8w5zr ++ rm /tmp/tmp.PmhrcgNyZN /tmp/tmp.6nO7k8w5zr ++ return 0 + [[ 2 == \2 ]] + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' 
-s /tmp/tmp.mo3cXZH4Te/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-3.sql /tmp/tmp.mo3cXZH4Te/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.1ukhGXhkvp ++ mktemp + local LAST_ERR=/tmp/tmp.TT8MBT47fF + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1ukhGXhkvp secret/my-cluster-secrets patched + cat /tmp/tmp.TT8MBT47fF + rm /tmp/tmp.1ukhGXhkvp /tmp/tmp.TT8MBT47fF + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ base64 --decode ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OJQfkBXV5d +++ mktemp ++ local LAST_ERR=/tmp/tmp.hzLSBpp14U ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OJQfkBXV5d ++ cat /tmp/tmp.hzLSBpp14U ++ rm /tmp/tmp.OJQfkBXV5d /tmp/tmp.hzLSBpp14U ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + echo 'Skipping: the dual password feature doesn'\''t work for 5.7. PXC 5.7 doesn'\''t support it!' Skipping: the dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it!
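Every patch_secret step in this test reduces to the same operation: strategic-merge-patch the Secret with a new base64-encoded value, then let the operator pick up the change on its next reconcile. A minimal sketch of the helper as it can be read back from the trace above (assumption: the real helper goes through kubectl_bin, which adds the retry loop and the LAST_OUT/LAST_ERR temp-file capture; plain kubectl is used here for brevity):

patch_secret() {
    local secret=$1
    local key=$2
    local value=$3  # already base64-encoded, e.g. $(echo -n test-password | base64)
    # strategic merge patch: only the named key under .data is replaced
    kubectl patch secret "$secret" -p="{\"data\":{\"$key\": \"$value\"}}"
}

# usage, matching the trace:
# patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA==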
+ return + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hbfmNaYF6g +++ mktemp ++ local LAST_ERR=/tmp/tmp.TIvgxMes1K ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hbfmNaYF6g ++ cat /tmp/tmp.TIvgxMes1K ++ rm /tmp/tmp.hbfmNaYF6g /tmp/tmp.TIvgxMes1K ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.V9YOtv1muI +++ mktemp ++ local LAST_ERR=/tmp/tmp.1ZLRR3Peqc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.V9YOtv1muI ++ cat /tmp/tmp.1ZLRR3Peqc ++ rm /tmp/tmp.V9YOtv1muI /tmp/tmp.1ZLRR3Peqc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pAWTPq6l6m +++ mktemp ++ local LAST_ERR=/tmp/tmp.wRcAe8Vp6w ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pAWTPq6l6m ++ cat /tmp/tmp.wRcAe8Vp6w ++ rm /tmp/tmp.pAWTPq6l6m /tmp/tmp.wRcAe8Vp6w ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hrG5uFI2Fp +++ mktemp ++ local LAST_ERR=/tmp/tmp.bh5uEWf0si ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hrG5uFI2Fp ++ cat /tmp/tmp.bh5uEWf0si ++ rm /tmp/tmp.hrG5uFI2Fp /tmp/tmp.bh5uEWf0si ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.SNL9ACfhpE ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.plxksKv0dC +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.SNL9ACfhpE +++++ cat /tmp/tmp.plxksKv0dC +++++ rm /tmp/tmp.SNL9ACfhpE /tmp/tmp.plxksKv0dC +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.1eW2enj1Kj ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.TKXEWegBQF +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get 
pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.1eW2enj1Kj +++++ cat /tmp/tmp.TKXEWegBQF +++++ rm /tmp/tmp.1eW2enj1Kj /tmp/tmp.TKXEWegBQF +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.n1McAtZv6U +++ mktemp ++ local LAST_ERR=/tmp/tmp.aNYlWqF6bO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.n1McAtZv6U ++ cat /tmp/tmp.aNYlWqF6bO ++ rm /tmp/tmp.n1McAtZv6U /tmp/tmp.aNYlWqF6bO ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NKyeiALgH5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ihFbEnwUH3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NKyeiALgH5 ++ cat /tmp/tmp.ihFbEnwUH3 ++ rm /tmp/tmp.NKyeiALgH5 /tmp/tmp.ihFbEnwUH3 ++ return 0 + client_pod=pxc-client-64b479df95-499mv + wait_pod pxc-client-64b479df95-499mv + local pod=pxc-client-64b479df95-499mv + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-499mv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-499mv condition met pxc-client-64b479df95-499mv.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.mo3cXZH4Te/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4.sql /tmp/tmp.mo3cXZH4Te/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.7kAVByULEG ++ mktemp + local LAST_ERR=/tmp/tmp.uvq203U4QG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7kAVByULEG secret/my-cluster-secrets patched + cat /tmp/tmp.uvq203U4QG + rm /tmp/tmp.7kAVByULEG /tmp/tmp.uvq203U4QG + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aZVZscp5XU +++ mktemp ++ local LAST_ERR=/tmp/tmp.RUyq2vw89W ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aZVZscp5XU ++ cat /tmp/tmp.RUyq2vw89W ++ rm /tmp/tmp.aZVZscp5XU /tmp/tmp.RUyq2vw89W ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.p3AvJGcdCM +++ mktemp ++ local LAST_ERR=/tmp/tmp.nHgqjlZCCB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.p3AvJGcdCM ++ cat /tmp/tmp.nHgqjlZCCB ++ rm /tmp/tmp.p3AvJGcdCM /tmp/tmp.nHgqjlZCCB ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1zfJxFgfYh +++ mktemp ++ local LAST_ERR=/tmp/tmp.aWAj6nZdp9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1zfJxFgfYh ++ cat /tmp/tmp.aWAj6nZdp9 ++ rm /tmp/tmp.1zfJxFgfYh /tmp/tmp.aWAj6nZdp9 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.wX1QC1vJ2m ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.qfiUECKEh1 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ 
exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.wX1QC1vJ2m +++++ cat /tmp/tmp.qfiUECKEh1 +++++ rm /tmp/tmp.wX1QC1vJ2m /tmp/tmp.qfiUECKEh1 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ufkkQcDIMH ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.m5Cp6PdqKy +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ufkkQcDIMH +++++ cat /tmp/tmp.m5Cp6PdqKy +++++ rm /tmp/tmp.ufkkQcDIMH /tmp/tmp.m5Cp6PdqKy +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Cjfc84tmsY +++ mktemp ++ local LAST_ERR=/tmp/tmp.xg2wi3elY6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Cjfc84tmsY ++ cat /tmp/tmp.xg2wi3elY6 ++ rm /tmp/tmp.Cjfc84tmsY /tmp/tmp.xg2wi3elY6 ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.s2LElVhwIy +++ mktemp ++ local LAST_ERR=/tmp/tmp.K7saZ4mDNz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.s2LElVhwIy ++ cat /tmp/tmp.K7saZ4mDNz ++ rm /tmp/tmp.s2LElVhwIy /tmp/tmp.K7saZ4mDNz ++ return 0 + client_pod=pxc-client-64b479df95-499mv + wait_pod pxc-client-64b479df95-499mv + local pod=pxc-client-64b479df95-499mv + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-499mv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-499mv condition met pxc-client-64b479df95-499mv.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.mo3cXZH4Te/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4.sql /tmp/tmp.mo3cXZH4Te/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Va1UEXX9cT ++ mktemp + local LAST_ERR=/tmp/tmp.fgcXIZgEXL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Va1UEXX9cT perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.fgcXIZgEXL + rm /tmp/tmp.Va1UEXX9cT /tmp/tmp.fgcXIZgEXL + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.irB2QvIkFN +++ mktemp ++ local LAST_ERR=/tmp/tmp.ify49b1lWe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.irB2QvIkFN ++ cat /tmp/tmp.ify49b1lWe ++ rm /tmp/tmp.irB2QvIkFN /tmp/tmp.ify49b1lWe ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YZcnUm384r +++ mktemp ++ local LAST_ERR=/tmp/tmp.Rq3fkzmENj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YZcnUm384r ++ cat /tmp/tmp.Rq3fkzmENj ++ rm /tmp/tmp.YZcnUm384r /tmp/tmp.Rq3fkzmENj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AGdimMNE5Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.2aRGcgc7US ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AGdimMNE5Y ++ cat /tmp/tmp.2aRGcgc7US ++ rm /tmp/tmp.AGdimMNE5Y /tmp/tmp.2aRGcgc7US ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vPwx2wEwrl +++ mktemp ++ local LAST_ERR=/tmp/tmp.kvswBBDa58 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break 
++ cat /tmp/tmp.vPwx2wEwrl ++ cat /tmp/tmp.kvswBBDa58 ++ rm /tmp/tmp.vPwx2wEwrl /tmp/tmp.kvswBBDa58 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7rmQt7uH1z +++ mktemp ++ local LAST_ERR=/tmp/tmp.m9G1rglCEA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7rmQt7uH1z ++ cat /tmp/tmp.m9G1rglCEA ++ rm /tmp/tmp.7rmQt7uH1z /tmp/tmp.m9G1rglCEA ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VbdocaL1p9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.VZUYWzIGFA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VbdocaL1p9 ++ cat /tmp/tmp.VZUYWzIGFA ++ rm /tmp/tmp.VbdocaL1p9 /tmp/tmp.VZUYWzIGFA ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Q8Apk0nVzO +++ mktemp ++ local LAST_ERR=/tmp/tmp.0YQwQ9S83O ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Q8Apk0nVzO ++ cat /tmp/tmp.0YQwQ9S83O ++ rm /tmp/tmp.Q8Apk0nVzO /tmp/tmp.0YQwQ9S83O ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jtnBPkEYq2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.8N8rMEyKWu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jtnBPkEYq2 ++ cat /tmp/tmp.8N8rMEyKWu ++ rm /tmp/tmp.jtnBPkEYq2 /tmp/tmp.8N8rMEyKWu ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.87ZazWE9pm ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.XjeiN4Uctq +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.87ZazWE9pm +++++ cat /tmp/tmp.XjeiN4Uctq +++++ rm /tmp/tmp.87ZazWE9pm /tmp/tmp.XjeiN4Uctq +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.asPWxp1AqD ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.VgmsqRVA1x +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.asPWxp1AqD +++++ cat 
/tmp/tmp.VgmsqRVA1x +++++ rm /tmp/tmp.asPWxp1AqD /tmp/tmp.VgmsqRVA1x +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sFjcTt1MH3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.IvohuQ5yfY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sFjcTt1MH3 ++ cat /tmp/tmp.IvohuQ5yfY ++ rm /tmp/tmp.sFjcTt1MH3 /tmp/tmp.IvohuQ5yfY ++ return 0 + [[ 2 == \2 ]] + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.7l1SnkXYV4 ++ mktemp + local LAST_ERR=/tmp/tmp.1RZ1jVYf4m + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7l1SnkXYV4 secret/my-cluster-secrets-2 patched + cat /tmp/tmp.1RZ1jVYf4m + rm /tmp/tmp.7l1SnkXYV4 /tmp/tmp.1RZ1jVYf4m + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.unqgI5Acaz +++ mktemp ++ local LAST_ERR=/tmp/tmp.7UlPsImeYk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.unqgI5Acaz ++ cat /tmp/tmp.7UlPsImeYk ++ rm /tmp/tmp.unqgI5Acaz /tmp/tmp.7UlPsImeYk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sPnLUcTbTL +++ mktemp ++ local LAST_ERR=/tmp/tmp.5FiI0nDrCJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sPnLUcTbTL ++ cat /tmp/tmp.5FiI0nDrCJ ++ rm /tmp/tmp.sPnLUcTbTL /tmp/tmp.5FiI0nDrCJ ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rThSz0PHLN +++ mktemp ++ local LAST_ERR=/tmp/tmp.LQuLyJrnon ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ 
exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rThSz0PHLN ++ cat /tmp/tmp.LQuLyJrnon ++ rm /tmp/tmp.rThSz0PHLN /tmp/tmp.LQuLyJrnon ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.9eKxGEf11W ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.jTzPJ0Lrmt +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.9eKxGEf11W +++++ cat /tmp/tmp.jTzPJ0Lrmt +++++ rm /tmp/tmp.9eKxGEf11W /tmp/tmp.jTzPJ0Lrmt +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.2pkf8vfVmx ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.2NijsLGtw2 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.2pkf8vfVmx +++++ cat /tmp/tmp.2NijsLGtw2 +++++ rm /tmp/tmp.2pkf8vfVmx /tmp/tmp.2NijsLGtw2 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xbxzl5TFVO +++ mktemp ++ local LAST_ERR=/tmp/tmp.CcTPhEdbxu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xbxzl5TFVO ++ cat /tmp/tmp.CcTPhEdbxu ++ rm /tmp/tmp.xbxzl5TFVO /tmp/tmp.CcTPhEdbxu ++ return 0 + [[ 2 == \2 ]] + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MDRwHHm1rm +++ mktemp ++ local LAST_ERR=/tmp/tmp.MWuLA8SOAp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MDRwHHm1rm ++ cat /tmp/tmp.MWuLA8SOAp ++ rm /tmp/tmp.MDRwHHm1rm /tmp/tmp.MWuLA8SOAp ++ return 0 + client_pod=pxc-client-64b479df95-499mv + wait_pod pxc-client-64b479df95-499mv + local pod=pxc-client-64b479df95-499mv + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-499mv ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-64b479df95-499mv condition met 
pxc-client-64b479df95-499mv.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.mo3cXZH4Te/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4.sql /tmp/tmp.mo3cXZH4Te/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NyF2JlwqMA +++ mktemp ++ local LAST_ERR=/tmp/tmp.IHtDSwYgk8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NyF2JlwqMA ++ cat /tmp/tmp.IHtDSwYgk8 ++ rm /tmp/tmp.NyF2JlwqMA /tmp/tmp.IHtDSwYgk8 ++ return 0 + newpass='YawThvl^TqnP3u6fA' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''YawThvl^TqnP3u6fA'\'';' '-h some-name-pxc -uroot -p'\''YawThvl^TqnP3u6fA'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''YawThvl^TqnP3u6fA'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''YawThvl^TqnP3u6fA'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rCm9kDw2ns +++ mktemp ++ local LAST_ERR=/tmp/tmp.t18muSG8oE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rCm9kDw2ns ++ cat /tmp/tmp.t18muSG8oE ++ rm /tmp/tmp.rCm9kDw2ns /tmp/tmp.t18muSG8oE ++ return 0 + client_pod=pxc-client-64b479df95-499mv + wait_pod pxc-client-64b479df95-499mv + local pod=pxc-client-64b479df95-499mv + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-499mv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-499mv condition met pxc-client-64b479df95-499mv.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''YawThvl^TqnP3u6fA'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''YawThvl^TqnP3u6fA'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''YawThvl^TqnP3u6fA'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''YawThvl^TqnP3u6fA'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EDsQY7Lbjl +++ mktemp ++ local LAST_ERR=/tmp/tmp.gDSqzOkERh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EDsQY7Lbjl ++ cat /tmp/tmp.gDSqzOkERh ++ rm /tmp/tmp.EDsQY7Lbjl /tmp/tmp.gDSqzOkERh ++ return 0 + 
client_pod=pxc-client-64b479df95-499mv + wait_pod pxc-client-64b479df95-499mv + local pod=pxc-client-64b479df95-499mv + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-499mv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-499mv condition met pxc-client-64b479df95-499mv.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.mo3cXZH4Te/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4.sql /tmp/tmp.mo3cXZH4Te/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.gfHzyaQRYp +++ mktemp ++ local LAST_ERR=/tmp/tmp.m201oFFzSr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gfHzyaQRYp ++ cat /tmp/tmp.m201oFFzSr ++ rm /tmp/tmp.gfHzyaQRYp /tmp/tmp.m201oFFzSr ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.v09TX2c7ur ++ mktemp + local LAST_ERR=/tmp/tmp.smbq7ftx9h + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.v09TX2c7ur secret/my-cluster-secrets-2 configured + cat /tmp/tmp.smbq7ftx9h Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
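The warning above is expected on a first declarative write: my-cluster-secrets-2 was evidently created outside of kubectl apply (the operator generated it after the secretsName switch earlier in this test), so it lacks the kubectl.kubernetes.io/last-applied-configuration annotation that apply needs for its three-way merge, and kubectl patches the annotation in automatically. A hedged example of how a script that owns the object from the start can avoid the warning (file path taken from the trace):

# record last-applied-configuration at creation time...
kubectl create --save-config -f e2e-tests/users/conf/secrets.yml
# ...so that subsequent declarative updates stay warning-free
kubectl apply -f e2e-tests/users/conf/secrets.yml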
+ rm /tmp/tmp.v09TX2c7ur /tmp/tmp.smbq7ftx9h + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.c0149VbSfe +++ mktemp ++ local LAST_ERR=/tmp/tmp.kRn8U8yA5h ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.c0149VbSfe ++ cat /tmp/tmp.kRn8U8yA5h ++ rm /tmp/tmp.c0149VbSfe /tmp/tmp.kRn8U8yA5h ++ return 0 + client_pod=pxc-client-64b479df95-499mv + wait_pod pxc-client-64b479df95-499mv + local pod=pxc-client-64b479df95-499mv + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-499mv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-499mv condition met pxc-client-64b479df95-499mv.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.mo3cXZH4Te/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4.sql /tmp/tmp.mo3cXZH4Te/select-4.sql + newpass=test-password2 ++ base64 ++ echo -n test-password2 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.0YbFCVBQsD + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-22051~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ mktemp + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/conf/some-name.yml + local LAST_ERR=/tmp/tmp.fOCfVoGOxk + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1752-44f0e1a8#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0YbFCVBQsD 
perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.fOCfVoGOxk + rm /tmp/tmp.0YbFCVBQsD /tmp/tmp.fOCfVoGOxk + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.STRrnQVsO0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.AvDckEEg7b ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.STRrnQVsO0 ++ cat /tmp/tmp.AvDckEEg7b ++ rm /tmp/tmp.STRrnQVsO0 /tmp/tmp.AvDckEEg7b ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eR24gtRNZa +++ mktemp ++ local LAST_ERR=/tmp/tmp.CvMJyzNNmO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eR24gtRNZa ++ cat /tmp/tmp.CvMJyzNNmO ++ rm /tmp/tmp.eR24gtRNZa /tmp/tmp.CvMJyzNNmO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WfbWlMNNxU +++ mktemp ++ local LAST_ERR=/tmp/tmp.aR8Gel51K0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WfbWlMNNxU ++ cat /tmp/tmp.aR8Gel51K0 ++ rm /tmp/tmp.WfbWlMNNxU /tmp/tmp.aR8Gel51K0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7yQfGzX4CA +++ mktemp ++ local LAST_ERR=/tmp/tmp.ahF5Q03YHs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7yQfGzX4CA ++ cat /tmp/tmp.ahF5Q03YHs ++ rm /tmp/tmp.7yQfGzX4CA /tmp/tmp.ahF5Q03YHs ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bawEsJp7lr +++ mktemp ++ local LAST_ERR=/tmp/tmp.mBviqr2eFv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bawEsJp7lr ++ cat /tmp/tmp.mBviqr2eFv ++ rm /tmp/tmp.bawEsJp7lr /tmp/tmp.mBviqr2eFv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 
]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3NVP78UJYr +++ mktemp ++ local LAST_ERR=/tmp/tmp.uQ3FvIkcz7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3NVP78UJYr ++ cat /tmp/tmp.uQ3FvIkcz7 ++ rm /tmp/tmp.3NVP78UJYr /tmp/tmp.uQ3FvIkcz7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ebaH6wYY8E +++ mktemp ++ local LAST_ERR=/tmp/tmp.WD75OPG0KE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ebaH6wYY8E ++ cat /tmp/tmp.WD75OPG0KE ++ rm /tmp/tmp.ebaH6wYY8E /tmp/tmp.WD75OPG0KE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XSiO97xw5r +++ mktemp ++ local LAST_ERR=/tmp/tmp.YgU3MPXK1j ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XSiO97xw5r ++ cat /tmp/tmp.YgU3MPXK1j ++ rm /tmp/tmp.XSiO97xw5r /tmp/tmp.YgU3MPXK1j ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OvVFKED3a2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.E8dxKfFLFs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OvVFKED3a2 ++ cat /tmp/tmp.E8dxKfFLFs ++ rm /tmp/tmp.OvVFKED3a2 /tmp/tmp.E8dxKfFLFs ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.YanATkPHgb ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.u149f6J8Tr +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.YanATkPHgb +++++ cat /tmp/tmp.u149f6J8Tr +++++ rm /tmp/tmp.YanATkPHgb /tmp/tmp.u149f6J8Tr +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MrAvDHKlri +++ mktemp ++ local LAST_ERR=/tmp/tmp.bzhJuCGuW2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MrAvDHKlri ++ cat /tmp/tmp.bzhJuCGuW2 ++ rm /tmp/tmp.MrAvDHKlri /tmp/tmp.bzhJuCGuW2 ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + 
local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.q5ws7w6keN ++ mktemp + local LAST_ERR=/tmp/tmp.LTeQvfTsUr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.q5ws7w6keN secret/my-cluster-secrets patched + cat /tmp/tmp.LTeQvfTsUr + rm /tmp/tmp.q5ws7w6keN /tmp/tmp.LTeQvfTsUr + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Qz8I4tff8W +++ mktemp ++ local LAST_ERR=/tmp/tmp.rnlwgvGg2B ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Qz8I4tff8W ++ cat /tmp/tmp.rnlwgvGg2B ++ rm /tmp/tmp.Qz8I4tff8W /tmp/tmp.rnlwgvGg2B ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hFZuB8MYes +++ mktemp ++ local LAST_ERR=/tmp/tmp.61jPrXhqdZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hFZuB8MYes ++ cat /tmp/tmp.61jPrXhqdZ ++ rm /tmp/tmp.hFZuB8MYes /tmp/tmp.61jPrXhqdZ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1SaBPuzotx +++ mktemp ++ local LAST_ERR=/tmp/tmp.MveEspSXAx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1SaBPuzotx ++ cat /tmp/tmp.MveEspSXAx ++ rm /tmp/tmp.1SaBPuzotx /tmp/tmp.MveEspSXAx ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nDJpTAkZqg +++ mktemp ++ local LAST_ERR=/tmp/tmp.ueVpIKXIQq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nDJpTAkZqg ++ cat /tmp/tmp.ueVpIKXIQq ++ rm /tmp/tmp.nDJpTAkZqg /tmp/tmp.ueVpIKXIQq ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pbsOV7ScRD +++ mktemp ++ local LAST_ERR=/tmp/tmp.b4S8y528G0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl 
get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pbsOV7ScRD ++ cat /tmp/tmp.b4S8y528G0 ++ rm /tmp/tmp.pbsOV7ScRD /tmp/tmp.b4S8y528G0 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.UGXfAwTLo0 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.UrRnuw68nb +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.UGXfAwTLo0 +++++ cat /tmp/tmp.UrRnuw68nb +++++ rm /tmp/tmp.UGXfAwTLo0 /tmp/tmp.UrRnuw68nb +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.84i99RI83P +++ mktemp ++ local LAST_ERR=/tmp/tmp.CXRFjOHX0U ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.84i99RI83P ++ cat /tmp/tmp.CXRFjOHX0U ++ rm /tmp/tmp.84i99RI83P /tmp/tmp.CXRFjOHX0U ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JCnBrbV2Nc +++ mktemp ++ local LAST_ERR=/tmp/tmp.gqzrbp5GBJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JCnBrbV2Nc ++ cat /tmp/tmp.gqzrbp5GBJ ++ rm /tmp/tmp.JCnBrbV2Nc /tmp/tmp.gqzrbp5GBJ ++ return 0 + client_pod=pxc-client-64b479df95-499mv + wait_pod pxc-client-64b479df95-499mv + local pod=pxc-client-64b479df95-499mv + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-499mv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-499mv condition met pxc-client-64b479df95-499mv.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.mo3cXZH4Te/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-3.sql /tmp/tmp.mo3cXZH4Te/select-3.sql + destroy users-22051 + local namespace=users-22051 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v 'the object has been modified' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + grep -v 'get backup status: Job.batch' + grep -v level=info + sort -u + tee /tmp/tmp.mo3cXZH4Te/operator.log +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.8jIQ87EsZ7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.j54EM8rvTr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8jIQ87EsZ7 ++ cat /tmp/tmp.j54EM8rvTr ++ rm /tmp/tmp.8jIQ87EsZ7 /tmp/tmp.j54EM8rvTr ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-6dbfd9bb9d-8vkvm ++ mktemp + local LAST_OUT=/tmp/tmp.jQc2lRsH8r ++ mktemp + local LAST_ERR=/tmp/tmp.wD5G2yH9A3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-6dbfd9bb9d-8vkvm + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jQc2lRsH8r + cat /tmp/tmp.wD5G2yH9A3 + rm /tmp/tmp.jQc2lRsH8r /tmp/tmp.wD5G2yH9A3 + return 0
2024-07-08T09:43:46.717Z INFO setup Manager starting up {"gitCommit": "44f0e1a80c0c8b8f0ea06b22799841ae06d9bf8c", "gitBranch": "PR-1752-44f0e1a8", "buildTime": "2024-07-08T07:48:21Z", "goVersion": "go1.22.5", "os": "linux", "arch": "amd64"}
2024-07-08T09:43:46.717Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1469001"}
2024-07-08T09:43:46.789Z INFO setup Registering Components.
2024-07-08T09:43:49.908Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"}
2024-07-08T09:43:50.012Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false}
2024-07-08T09:43:50.012Z INFO controller-runtime.metrics Starting metrics server
2024-07-08T09:43:50.012Z INFO controller-runtime.webhook Starting webhook server
2024-07-08T09:43:50.012Z INFO setup Starting the Cmd.
2024-07-08T09:43:50.012Z INFO starting server {"name": "health probe", "addr": "[::]:8081"}
2024-07-08T09:43:50.013Z INFO controller-runtime.certwatcher Starting certificate watcher
2024-07-08T09:43:50.013Z INFO controller-runtime.certwatcher Updated current TLS certificate
2024-07-08T09:43:50.013Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443}
2024-07-08T09:43:50.213Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com...
2024-07-08T09:43:50.578Z DEBUG events percona-xtradb-cluster-operator-6dbfd9bb9d-8vkvm_d79eef56-6fe7-4fa8-be65-5645515122e7 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"64974147-705d-43ca-8692-b232b7bf3c2f","apiVersion":"coordination.k8s.io/v1","resourceVersion":"62695"}, "reason": "LeaderElection"}
2024-07-08T09:43:50.578Z INFO Starting Controller {"controller": "pxc-controller"}
2024-07-08T09:43:50.578Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"}
2024-07-08T09:43:50.578Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com
2024-07-08T09:43:50.579Z INFO Starting Controller {"controller": "pxcbackup-controller"}
2024-07-08T09:43:50.579Z INFO Starting Controller {"controller": "pxcrestore-controller"}
2024-07-08T09:43:50.579Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"}
2024-07-08T09:43:50.579Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"}
2024-07-08T09:43:50.731Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1}
2024-07-08T09:43:50.731Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1}
2024-07-08T09:43:50.751Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1}
2024-07-08T09:44:21.020Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "e20ce642-de64-4c16-b50e-46766182ab4f", "version": "1.15.0"}
2024-07-08T09:45:39.988Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "c5abfd77-b752-4f34-a620-eafd09210b37", "user": "operator"}
2024-07-08T09:45:40.015Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "c5abfd77-b752-4f34-a620-eafd09210b37", "user": "monitor"}
2024-07-08T09:45:40.054Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "c5abfd77-b752-4f34-a620-eafd09210b37"}
2024-07-08T09:45:40.092Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "c5abfd77-b752-4f34-a620-eafd09210b37", "user": "xtrabackup"}
2024-07-08T09:45:40.130Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "c5abfd77-b752-4f34-a620-eafd09210b37"}
2024-07-08T09:45:40.233Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "c5abfd77-b752-4f34-a620-eafd09210b37", "err": "get primary pxc pod: not found"}
2024-07-08T09:45:45.078Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "07dd59ef-5649-4a9b-bbb8-55aae58efbfc", "err": "get primary pxc pod: not found"}
2024-07-08T09:45:50.950Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "38a4f8de-5b5e-40ad-87b9-2aeae956e844", "err": "get primary pxc pod: not found"}
2024-07-08T09:45:56.291Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "440dfbec-8e63-4b95-92b1-21ee0c77a902", "err": "get primary pxc pod: not found"}
2024-07-08T09:48:07.938Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "32551e01-d3ee-426a-a722-5e4e249dddda", "user": "root"}
2024-07-08T09:48:07.984Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "32551e01-d3ee-426a-a722-5e4e249dddda", "user": "replication"}
2024-07-08T09:48:08.133Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "32551e01-d3ee-426a-a722-5e4e249dddda", "new version": "5.7.44-48-57"}
2024-07-08T09:48:11.352Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "32551e01-d3ee-426a-a722-5e4e249dddda"}
2024-07-08T09:48:16.158Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "f6885e04-7818-4dc1-a086-fada32eefe34"}
2024-07-08T09:48:21.319Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "00960a0a-0db9-4a7d-8567-8a34cf5f9d00"}
2024-07-08T09:48:26.614Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "62a1224e-5dd0-432d-b519-8ad1f279a42e"}
2024-07-08T09:48:31.852Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "aa748231-abef-4894-90ab-da20024f26c5"}
2024-07-08T09:48:37.427Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "374f9edd-6fd8-4c5b-9d99-4adfcbf64ac2"}
2024-07-08T09:48:42.252Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "f4d03106-6abf-4490-8bae-8326b5f21283"}
2024-07-08T09:48:47.521Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "f166d46a-bb83-4c49-ac31-af081c116d8f"}
2024-07-08T09:48:52.741Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "00315d77-1226-4fd5-a717-271f6c15fd58"}
2024-07-08T09:48:58.040Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "4470f119-e835-4b7c-a297-094e832a51a5"}
2024-07-08T09:49:03.227Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "dd1e7374-db41-4af2-96f5-9f2be3aa6982"}
2024-07-08T09:49:08.652Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "e4331556-de8a-4291-868f-15d85955ef08"}
2024-07-08T09:49:13.923Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "5a7afb0b-24dc-4396-976f-1f2f0113dc3f"}
2024-07-08T09:49:19.142Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name",
"reconcileID": "63a10a9b-8e6b-4a44-9b80-0b9f2be77f14"} 2024-07-08T09:49:21.142Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "cb2e4606-9b1c-4e97-a966-9fea9bca311d", "user": "root"} 2024-07-08T09:49:21.177Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "cb2e4606-9b1c-4e97-a966-9fea9bca311d", "user": "root"} 2024-07-08T09:49:21.185Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "cb2e4606-9b1c-4e97-a966-9fea9bca311d", "secret": "some-name-mysql-init", "user": "root"} 2024-07-08T09:49:27.088Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "cb2e4606-9b1c-4e97-a966-9fea9bca311d"} 2024-07-08T09:49:27.105Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "cb2e4606-9b1c-4e97-a966-9fea9bca311d", "user": "root"} 2024-07-08T09:49:30.341Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "cb2e4606-9b1c-4e97-a966-9fea9bca311d"} 2024-07-08T09:49:35.671Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "44d16946-2be1-4f24-8b1e-d88d05c805e7"} 2024-07-08T09:49:41.041Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "9a5a5d78-6548-46e9-b865-0dcd620a476a"} 2024-07-08T09:49:57.840Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2c2b9ae3-a458-41cb-bd87-77a1cd46dd01", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T09:49:58.971Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "b03a2590-7860-4aed-ac83-7b17c23748e4", "err": "get primary pxc pod: not found"} 2024-07-08T09:50:02.846Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "b03a2590-7860-4aed-ac83-7b17c23748e4", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in 
ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T09:50:04.156Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "69018245-e677-44be-a82d-9472146183de", "user": "proxyadmin"} 2024-07-08T09:50:04.156Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "69018245-e677-44be-a82d-9472146183de", "user": "proxyadmin"} 2024-07-08T09:50:04.221Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "69018245-e677-44be-a82d-9472146183de", "user": "proxyadmin"} 2024-07-08T09:50:04.230Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "69018245-e677-44be-a82d-9472146183de", "user": "proxyadmin"} 2024-07-08T09:50:04.230Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "69018245-e677-44be-a82d-9472146183de", "last-applied-secret": "1a55867d0f4fa0d0643653b2eb57b2f690315d8d843a10599c9839c2a779a95f"} 2024-07-08T09:50:04.428Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "69018245-e677-44be-a82d-9472146183de", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T09:50:57.940Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "86e4f381-7727-4160-9e9b-0954de5bbee7"} 2024-07-08T09:51:08.201Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "538d2b11-0536-48fd-b561-554360ac5b69"} 2024-07-08T09:51:13.247Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "28359031-dff4-4dc0-aa0f-59c3e0a31e00"} 2024-07-08T09:51:17.138Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "aa3658a2-f7c7-4178-8871-db2382839ffb", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T09:51:22.882Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "db8b5fa5-eaa5-4a2b-8593-cd45a9d79926", "user": "xtrabackup"} 2024-07-08T09:51:22.907Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "db8b5fa5-eaa5-4a2b-8593-cd45a9d79926", "user": "xtrabackup"} 2024-07-08T09:51:22.916Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "db8b5fa5-eaa5-4a2b-8593-cd45a9d79926", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-07-08T09:51:22.924Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "db8b5fa5-eaa5-4a2b-8593-cd45a9d79926", "user": "xtrabackup"} 2024-07-08T09:51:22.924Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "db8b5fa5-eaa5-4a2b-8593-cd45a9d79926", "last-applied-secret": "d11bcc70ca53ea410cd455e13d82e7225917b0861d9b677ed34c02c5f6fbaf1b"} 2024-07-08T09:51:28.852Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "db8b5fa5-eaa5-4a2b-8593-cd45a9d79926"} 2024-07-08T09:53:17.838Z INFO 
reconcile replication error {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "bb06d5da-915f-4931-9b28-3bd2a3f1f85c", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.47.250.62:33062: connect: connection refused"} 2024-07-08T09:53:23.033Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "49c523a7-f107-40e5-96b8-2f69e3631ed1", "primary name": "some-name-pxc-0.some-name-pxc.users-22051.svc.cluster.local"} 2024-07-08T09:53:28.204Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "b0b72d89-61f7-4d4a-b8e8-930defcbdc18", "primary name": "some-name-pxc-0.some-name-pxc.users-22051.svc.cluster.local"} 2024-07-08T09:53:33.390Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "5991ff6a-1455-4e48-bdcf-874d26535d0b", "primary name": "some-name-pxc-0.some-name-pxc.users-22051.svc.cluster.local"} 2024-07-08T09:53:57.705Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "12ce9360-76a9-4341-9d5a-3b648150e1d9"} 2024-07-08T09:54:03.030Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "c58a2438-ee91-492e-8c72-3b0a49e5c416"} 2024-07-08T09:54:08.324Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "f669c010-b79b-43af-bad4-85f24a3420af"} 2024-07-08T09:54:13.121Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "1bd87891-11e0-4c5a-8311-d3c696861b21"} 2024-07-08T09:54:15.066Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "bc204fe0-5ec9-4e9f-b696-a0572904ecfa", "user": "monitor"} 2024-07-08T09:54:15.093Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "bc204fe0-5ec9-4e9f-b696-a0572904ecfa", "user": "monitor"} 2024-07-08T09:54:15.106Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "bc204fe0-5ec9-4e9f-b696-a0572904ecfa", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-08T09:54:15.147Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "bc204fe0-5ec9-4e9f-b696-a0572904ecfa", "user": "monitor"} 2024-07-08T09:54:15.157Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "bc204fe0-5ec9-4e9f-b696-a0572904ecfa", "user": "monitor"} 2024-07-08T09:54:15.157Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "bc204fe0-5ec9-4e9f-b696-a0572904ecfa", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-07-08T09:54:17.902Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": 
"bc204fe0-5ec9-4e9f-b696-a0572904ecfa", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T09:55:00.718Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2a9c79ae-1b44-4d12-9171-8f5c9c3fa14a"} 2024-07-08T09:55:05.530Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "e13aaf26-57dc-4440-a867-5ce14aa5900f"} 2024-07-08T09:55:10.839Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "824ddc38-f329-431a-a680-f0b4fdc7e53d"} 2024-07-08T09:55:16.044Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "6f5fe476-478a-4dc2-bef4-4a171fac1fdd"} 2024-07-08T09:55:21.412Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "a57a72ba-983d-4ffa-b33f-816c3e84856b"} 2024-07-08T09:55:23.305Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "48ba1348-0ef8-4be0-a13c-f9023ec5026b", "user": "operator"} 2024-07-08T09:55:23.329Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "48ba1348-0ef8-4be0-a13c-f9023ec5026b", "user": "operator"} 2024-07-08T09:55:23.337Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "48ba1348-0ef8-4be0-a13c-f9023ec5026b", "secret": "some-name-mysql-init", "user": "operator"} 2024-07-08T09:55:23.346Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "48ba1348-0ef8-4be0-a13c-f9023ec5026b", "user": "operator"} 2024-07-08T09:55:23.346Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "48ba1348-0ef8-4be0-a13c-f9023ec5026b", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-07-08T09:55:24.713Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "48ba1348-0ef8-4be0-a13c-f9023ec5026b", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22051.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22051.svc.' 
(using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22051.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22051.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22051.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22051.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T09:56:08.740Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2d856912-4271-4784-9b0f-8e3e095f414d"} 2024-07-08T09:56:17.031Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "88094fe0-bf50-4d33-b953-1a83606e2edc"} 2024-07-08T09:56:21.508Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "secrets": "my-cluster-secrets-2"} 2024-07-08T09:56:21.508Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "user": "root"} 2024-07-08T09:56:21.543Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "user": "root"} 2024-07-08T09:56:21.551Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "secret": "some-name-mysql-init", "user": "root"} 2024-07-08T09:56:23.374Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "31dd5dcc-0f36-4eb2-9360-f11eadfc9caf"} 2024-07-08T09:56:25.639Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63"} 2024-07-08T09:56:25.647Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "user": "root"} 2024-07-08T09:56:25.647Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "user": "operator"} 2024-07-08T09:56:25.669Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22051", "name": 
"some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "user": "operator"} 2024-07-08T09:56:25.677Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "secret": "some-name-mysql-init", "user": "operator"} 2024-07-08T09:56:25.687Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "user": "operator"} 2024-07-08T09:56:25.687Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "user": "monitor"} 2024-07-08T09:56:25.708Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "user": "monitor"} 2024-07-08T09:56:25.719Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-08T09:56:25.760Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "user": "monitor"} 2024-07-08T09:56:25.770Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "user": "monitor"} 2024-07-08T09:56:25.770Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "user": "xtrabackup"} 2024-07-08T09:56:25.791Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "user": "xtrabackup"} 2024-07-08T09:56:25.799Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-07-08T09:56:25.815Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "user": "xtrabackup"} 2024-07-08T09:56:25.815Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "user": "replication"} 2024-07-08T09:56:25.834Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "user": "replication"} 2024-07-08T09:56:25.842Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "secret": "some-name-mysql-init", "user": "replication"} 2024-07-08T09:56:25.852Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "user": "replication"} 2024-07-08T09:56:25.852Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22051", 
"name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "user": "proxyadmin"} 2024-07-08T09:56:25.894Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "user": "proxyadmin"} 2024-07-08T09:56:25.903Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "user": "proxyadmin"} 2024-07-08T09:56:25.903Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "last-applied-secret": "5fc5693f1d6a3aa05f9ac8351369a8ced4a96a5be3ea024948dbbc04af8c676b"} 2024-07-08T09:56:25.903Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "last-applied-secret": "5fc5693f1d6a3aa05f9ac8351369a8ced4a96a5be3ea024948dbbc04af8c676b"} 2024-07-08T09:56:26.178Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2ad335ea-2cb3-467a-9d69-611b7e102a63", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T09:58:11.863Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "2cf0584f-fcd4-4fa7-a262-7c3602ba97df", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-22051 on 10.45.128.10:53: no such host"} 2024-07-08T09:58:17.150Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "4f18dbe5-059b-43c9-a1d2-767dd91489e3", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-22051 on 10.45.128.10:53: no such host"} 2024-07-08T09:58:22.376Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "64320906-7da5-4102-a612-737534adc452", "primary name": "some-name-pxc-0.some-name-pxc.users-22051.svc.cluster.local"} 2024-07-08T09:58:27.542Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "13b39894-0164-4912-a15b-21b850cdca4d", "primary name": "some-name-pxc-0.some-name-pxc.users-22051.svc.cluster.local"} 2024-07-08T09:58:32.696Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "b61d49b9-b0c3-4b2d-bb23-28b4ef94e4bd", "primary name": "some-name-pxc-0.some-name-pxc.users-22051.svc.cluster.local"} 2024-07-08T09:58:37.872Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "38a78697-49b1-420b-b250-3f820eb95a3d", "primary name": "some-name-pxc-0.some-name-pxc.users-22051.svc.cluster.local"} 2024-07-08T09:58:43.024Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "73a89ccf-d164-4475-9ad1-053268fe9412", "primary name": "some-name-pxc-0.some-name-pxc.users-22051.svc.cluster.local"} 2024-07-08T09:58:48.192Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "bd4fe80f-c6f6-4dc8-be54-109ac2856e5d", "primary name": "some-name-pxc-0.some-name-pxc.users-22051.svc.cluster.local"} 2024-07-08T09:58:56.822Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "ff19547f-e0d8-45d6-8958-2ab066ac07e0"} 2024-07-08T09:59:01.795Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "5f4149bd-ad14-4537-a3fd-c600485018c9"} 2024-07-08T09:59:07.306Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "875f4395-1add-4776-956a-dab730263fe8"} 2024-07-08T09:59:09.013Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "edfd2bd6-b93f-44d1-8f5f-cc98c07cb7e3", "user": "operator"} 2024-07-08T09:59:09.040Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "edfd2bd6-b93f-44d1-8f5f-cc98c07cb7e3", "user": "operator"} 2024-07-08T09:59:09.054Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "edfd2bd6-b93f-44d1-8f5f-cc98c07cb7e3", "secret": "some-name-mysql-init", "user": "operator"} 2024-07-08T09:59:09.075Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "edfd2bd6-b93f-44d1-8f5f-cc98c07cb7e3", "user": "operator"} 2024-07-08T09:59:09.075Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "edfd2bd6-b93f-44d1-8f5f-cc98c07cb7e3", 
"last-applied-secret": "3a9395b199e47766f4f8a71efbda2e5e191408775922719d6b0fe4f783c94a0f"} 2024-07-08T09:59:10.414Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "edfd2bd6-b93f-44d1-8f5f-cc98c07cb7e3", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22051.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22051.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22051.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22051.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22051.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-22051.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T09:59:46.139Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "ecccc495-029b-4c8d-a6ea-2500efc40b76"} 2024-07-08T09:59:54.203Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "1d78e274-2167-4f4b-b779-0b7e49a9818b"} 2024-07-08T10:00:06.849Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "be00a58c-c9e5-45b1-8ad1-f66789c9610b"} 2024-07-08T10:00:12.409Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "0d593f65-e568-4820-8de1-6aa76c4e93bb"} 2024-07-08T10:00:17.710Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "93f4127e-8053-4dce-93d2-4317f570d04f"} 2024-07-08T10:00:22.693Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "e883c9f5-1c20-4219-91b5-e8ca25e76ebe"} 2024-07-08T10:00:28.131Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "199dabfc-722f-4021-b8d2-361c817300f1"} 2024-07-08T10:00:34.257Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "e480d654-8ed9-4cbf-bcd0-5f250f541dcc"} 2024-07-08T10:00:39.511Z DEBUG PXC users 
synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "4552f0be-e079-403d-82c7-5342b201f3c6"} 2024-07-08T10:00:44.745Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "4d4e2c5a-c9c3-474c-a2fc-e6da3a0b3a6d"} 2024-07-08T10:00:50.130Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "cbd2734e-05a9-44b2-bba1-92b965442ed9"} 2024-07-08T10:00:55.310Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "df415c74-8d91-4b80-8733-d1feb8a414c5"} 2024-07-08T10:01:00.511Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "dcbc69cd-3285-4b0f-a58a-34e2712f078b"} 2024-07-08T10:01:05.836Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "f730bd99-c736-48ed-9562-be50edde8633"} 2024-07-08T10:01:11.128Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "1e90b9d2-e962-4895-83b3-6836710267bb"} 2024-07-08T10:01:16.410Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "d157a9ed-34aa-448e-86b5-eb6c61d821ea"} 2024-07-08T10:01:18.329Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "a908309d-63a3-439f-b6eb-9ae18438d98c", "user": "root"} 2024-07-08T10:01:18.364Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "a908309d-63a3-439f-b6eb-9ae18438d98c", "user": "root"} 2024-07-08T10:01:18.372Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "a908309d-63a3-439f-b6eb-9ae18438d98c", "secret": "some-name-mysql-init", "user": "root"} 2024-07-08T10:01:23.545Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "a908309d-63a3-439f-b6eb-9ae18438d98c"} 2024-07-08T10:01:23.554Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "a908309d-63a3-439f-b6eb-9ae18438d98c", "user": "root"} 2024-07-08T10:01:23.554Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "a908309d-63a3-439f-b6eb-9ae18438d98c", "user": "monitor"} 2024-07-08T10:01:23.578Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "a908309d-63a3-439f-b6eb-9ae18438d98c", "user": "monitor"} 2024-07-08T10:01:23.587Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "a908309d-63a3-439f-b6eb-9ae18438d98c", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-08T10:01:23.627Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "a908309d-63a3-439f-b6eb-9ae18438d98c", "user": "monitor"} 2024-07-08T10:01:23.639Z INFO Internal secrets updated 
{"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "a908309d-63a3-439f-b6eb-9ae18438d98c", "user": "monitor"} 2024-07-08T10:01:23.639Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "a908309d-63a3-439f-b6eb-9ae18438d98c", "user": "xtrabackup"} 2024-07-08T10:01:23.661Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "a908309d-63a3-439f-b6eb-9ae18438d98c", "user": "xtrabackup"} 2024-07-08T10:01:23.668Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "a908309d-63a3-439f-b6eb-9ae18438d98c", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-07-08T10:01:23.679Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "a908309d-63a3-439f-b6eb-9ae18438d98c", "user": "xtrabackup"} 2024-07-08T10:01:23.679Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "a908309d-63a3-439f-b6eb-9ae18438d98c", "user": "proxyadmin"} 2024-07-08T10:01:23.722Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "a908309d-63a3-439f-b6eb-9ae18438d98c", "user": "proxyadmin"} 2024-07-08T10:01:23.731Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "a908309d-63a3-439f-b6eb-9ae18438d98c", "user": "proxyadmin"} 2024-07-08T10:01:23.731Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "a908309d-63a3-439f-b6eb-9ae18438d98c", "last-applied-secret": "221351dba34eb0c00203f0737b83aa73e5966df969cd0990db7a0009570f8d18"} 2024-07-08T10:01:23.731Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "a908309d-63a3-439f-b6eb-9ae18438d98c", "last-applied-secret": "221351dba34eb0c00203f0737b83aa73e5966df969cd0990db7a0009570f8d18"} 2024-07-08T10:01:23.994Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "a908309d-63a3-439f-b6eb-9ae18438d98c", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T10:02:59.242Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "fb8b1b44-7a23-4931-8932-f41d2324fe18", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-22051 on 10.45.128.10:53: no such host"} 2024-07-08T10:03:09.422Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "31397dff-15d4-49bc-95a2-54d7f71b575e", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-22051 on 10.45.128.10:53: no such host"} 2024-07-08T10:03:14.637Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "f6590c23-148e-42e7-ae3e-e374aaac9cca", "primary name": "some-name-pxc-0.some-name-pxc.users-22051.svc.cluster.local"} 2024-07-08T10:03:20.143Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "08e9ab9a-d0c4-4265-9e5d-542b8a33193c", "primary name": "some-name-pxc-0.some-name-pxc.users-22051.svc.cluster.local"} 2024-07-08T10:03:26.100Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "727303e8-c7b0-4a0a-aefa-12f0c61bc0ba", "primary name": "some-name-pxc-0.some-name-pxc.users-22051.svc.cluster.local"} 2024-07-08T10:03:31.298Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "080caed9-cb39-4623-b6b9-7b3fdca936d9", "primary name": "some-name-pxc-0.some-name-pxc.users-22051.svc.cluster.local"} 2024-07-08T10:03:36.482Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "c174793f-8e83-449d-b539-960a86ed7d3b", "primary name": "some-name-pxc-0.some-name-pxc.users-22051.svc.cluster.local"} 2024-07-08T10:03:41.684Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "e2439a3e-0282-4ac3-97d8-13ac0cc1e940", "primary name": "some-name-pxc-0.some-name-pxc.users-22051.svc.cluster.local"} 2024-07-08T10:03:50.393Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "4d6531a2-a4c3-4f39-9dbb-35fa9e9a1ffa"} 2024-07-08T10:03:55.695Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "ab4cff87-7003-4c89-adeb-29fa4cdf9e38"} 2024-07-08T10:03:56.369Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "1719d30b-d80a-42d0-963a-6e96a6d81871", "user": "root"} 2024-07-08T10:03:56.405Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "1719d30b-d80a-42d0-963a-6e96a6d81871", "user": "root"} 2024-07-08T10:03:56.413Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "1719d30b-d80a-42d0-963a-6e96a6d81871", "secret": "some-name-mysql-init", "user": "root"} 2024-07-08T10:03:56.425Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "1719d30b-d80a-42d0-963a-6e96a6d81871", "user": "root"} 2024-07-08T10:03:56.425Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "1719d30b-d80a-42d0-963a-6e96a6d81871", "user": "operator"} 2024-07-08T10:03:56.448Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "1719d30b-d80a-42d0-963a-6e96a6d81871", "user": "operator"} 2024-07-08T10:03:56.456Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "1719d30b-d80a-42d0-963a-6e96a6d81871", "secret": "some-name-mysql-init", "user": "operator"} 2024-07-08T10:03:56.467Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "1719d30b-d80a-42d0-963a-6e96a6d81871", "user": "operator"} 2024-07-08T10:03:56.467Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "1719d30b-d80a-42d0-963a-6e96a6d81871", "user": "monitor"} 2024-07-08T10:03:56.490Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "1719d30b-d80a-42d0-963a-6e96a6d81871", "user": "monitor"} 2024-07-08T10:03:56.499Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "1719d30b-d80a-42d0-963a-6e96a6d81871", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-08T10:03:56.513Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "1719d30b-d80a-42d0-963a-6e96a6d81871", "user": "monitor"} 2024-07-08T10:03:56.513Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "1719d30b-d80a-42d0-963a-6e96a6d81871", "user": "xtrabackup"} 2024-07-08T10:03:56.533Z INFO User password 
updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "1719d30b-d80a-42d0-963a-6e96a6d81871", "user": "xtrabackup"} 2024-07-08T10:03:56.546Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "1719d30b-d80a-42d0-963a-6e96a6d81871", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-07-08T10:03:56.556Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "1719d30b-d80a-42d0-963a-6e96a6d81871", "user": "xtrabackup"} 2024-07-08T10:03:56.556Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "1719d30b-d80a-42d0-963a-6e96a6d81871", "user": "replication"} 2024-07-08T10:03:56.577Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "1719d30b-d80a-42d0-963a-6e96a6d81871", "user": "replication"} 2024-07-08T10:03:56.588Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "1719d30b-d80a-42d0-963a-6e96a6d81871", "secret": "some-name-mysql-init", "user": "replication"} 2024-07-08T10:03:56.597Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "1719d30b-d80a-42d0-963a-6e96a6d81871", "user": "replication"} 2024-07-08T10:03:56.598Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "1719d30b-d80a-42d0-963a-6e96a6d81871", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-07-08T10:03:56.598Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "1719d30b-d80a-42d0-963a-6e96a6d81871", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-07-08T10:03:57.300Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: ba6eed29-5c75-43ff-bc7d-a53a93f97dd6 2024-07-08T10:04:00.506Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "3bc5a9c4-2a6e-4b02-aad7-ecb534e1211a", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 10.45.138.136:3306: connect: connection refused"} 2024-07-08T10:05:51.229Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "7d5f5dfd-ca84-43fb-8b1c-dbd8369b9282", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-22051 on 10.45.128.10:53: no such host"} 2024-07-08T10:06:49.348Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "08e6b8e3-860b-49b6-9566-0c3ae838f99d", "user": "monitor"} 2024-07-08T10:06:49.374Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "08e6b8e3-860b-49b6-9566-0c3ae838f99d", "user": "monitor"} 2024-07-08T10:06:49.382Z INFO MySQL init secret updated {"controller": 
"pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "08e6b8e3-860b-49b6-9566-0c3ae838f99d", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-08T10:06:49.390Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "08e6b8e3-860b-49b6-9566-0c3ae838f99d", "user": "monitor"} 2024-07-08T10:06:49.390Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-22051", "name": "some-name", "reconcileID": "08e6b8e3-860b-49b6-9566-0c3ae838f99d", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:222 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:261 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:324 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1248 sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2 + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE + kubectl patch pxc -n users-22051 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.OcslCu6WT3 ++ mktemp + local LAST_ERR=/tmp/tmp.KHeztJZQvR + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.OcslCu6WT3 perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.KHeztJZQvR + rm /tmp/tmp.OcslCu6WT3 /tmp/tmp.KHeztJZQvR + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.Von3v69axd ++ mktemp + local LAST_ERR=/tmp/tmp.cATrwNHd5P + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Von3v69axd No resources found + cat /tmp/tmp.cATrwNHd5P + rm /tmp/tmp.Von3v69axd /tmp/tmp.cATrwNHd5P + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.q9trW5Tfiw ++ mktemp + local LAST_ERR=/tmp/tmp.9c14GHebfM + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.q9trW5Tfiw No resources found + cat /tmp/tmp.9c14GHebfM + rm /tmp/tmp.q9trW5Tfiw /tmp/tmp.9c14GHebfM + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.LIoJYHpEKD ++ mktemp + local LAST_ERR=/tmp/tmp.N0GGcstCNO + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LIoJYHpEKD 
+ kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
++ mktemp
+ local LAST_OUT=/tmp/tmp.LIoJYHpEKD
++ mktemp
+ local LAST_ERR=/tmp/tmp.N0GGcstCNO
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.LIoJYHpEKD
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted
+ cat /tmp/tmp.N0GGcstCNO
+ rm /tmp/tmp.LIoJYHpEKD /tmp/tmp.N0GGcstCNO
+ return 0
+ kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml
+ :
+ '[' '!' -z '' ']'
+ '[' -n pxc-operator ']'
+ kubectl_bin delete --grace-period=0 --force=true namespace users-22051
+ rm -rf /tmp/tmp.mo3cXZH4Te
+ kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator
++ mktemp
++ mktemp
+ local LAST_OUT=/tmp/tmp.WffDTaRTpx
+ desc 'test passed'
+ set +o xtrace
-----------------------------------------------------------------------------------
test passed
-----------------------------------------------------------------------------------
+ local LAST_OUT=/tmp/tmp.y1FbRzMwnp
++ mktemp
++ mktemp
+ local LAST_ERR=/tmp/tmp.bt7Oq8RzHR
+ local exit_status=0
+ local LAST_ERR=/tmp/tmp.UdOjdDVMHT
+ local exit_status=0
++ seq 0 2
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace users-22051
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace pxc-operator
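[Editor's note] The interleaved trace at the end (two mktemp/seq loops running at once) indicates the teardown deletes the test namespace and the operator namespace concurrently. A minimal equivalent of that final step, with namespace names taken from the log; running the two deletes as background jobs is the only assumption:

# Force-delete both CI namespaces in parallel; --grace-period=0 --force
# skips graceful pod shutdown, which is acceptable for test teardown.
kubectl delete --grace-period=0 --force=true namespace users-22051 &
kubectl delete --grace-period=0 --force=true namespace pxc-operator &
wait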