Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/logs/users-8-0.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + create_infra users-26053 + local ns=users-26053 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE + kubectl patch pxc -n users-2923 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.EbwTnJn3ra ++ mktemp + local LAST_ERR=/tmp/tmp.2AsUvXev1g + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.EbwTnJn3ra perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.2AsUvXev1g + rm /tmp/tmp.EbwTnJn3ra /tmp/tmp.2AsUvXev1g + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.vfUkXR2RYE ++ mktemp + local LAST_ERR=/tmp/tmp.DuGZrA3p6q + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.vfUkXR2RYE No resources found + cat /tmp/tmp.DuGZrA3p6q + rm /tmp/tmp.vfUkXR2RYE /tmp/tmp.DuGZrA3p6q + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.YQL6vn1GBw ++ mktemp + local LAST_ERR=/tmp/tmp.SKESUQWgzN + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.YQL6vn1GBw No resources found + cat /tmp/tmp.SKESUQWgzN + rm /tmp/tmp.YQL6vn1GBw /tmp/tmp.SKESUQWgzN + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified 
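Nearly every kubectl call in this log goes through the test harness's kubectl_bin wrapper, which is why the trace keeps repeating the same mktemp / LAST_OUT / LAST_ERR / seq 0 2 boilerplate. A minimal sketch of what that wrapper appears to do, reconstructed only from the trace above (the real helper in the repo's e2e-tests scripts may differ in details):

    # sketch: retry kubectl up to 3 times, capturing stdout/stderr in temp files
    kubectl_bin() {
        local LAST_OUT LAST_ERR i exit_status=0
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            [ "$exit_status" -eq 0 ] && break
            sleep 0    # the trace shows 'sleep 0' between failed attempts
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR"
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }

The destroy_chaos_mesh step above pipes empty grep results into "timeout 30 kubectl delete ...", so the "error: resource(s) were provided, but no name was specified" messages are expected when no chaos-mesh objects exist; each one is swallowed by the ':' no-op that follows.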
+ : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator + kubectl_bin get ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.Ki6zIXNVcW + local LAST_OUT=/tmp/tmp.IFnfeys5Py ++ mktemp + xargs kubectl delete ns ++ mktemp + local LAST_ERR=/tmp/tmp.C0g4eXL2Kn + local exit_status=0 + local LAST_ERR=/tmp/tmp.m6aspHXmfD + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Ki6zIXNVcW + cat /tmp/tmp.C0g4eXL2Kn + rm /tmp/tmp.Ki6zIXNVcW /tmp/tmp.C0g4eXL2Kn + return 0 namespace "users-2923" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.IFnfeys5Py namespace "pxc-operator" deleted + cat /tmp/tmp.m6aspHXmfD + rm /tmp/tmp.IFnfeys5Py /tmp/tmp.m6aspHXmfD + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.uM4UdGlXXl ++ mktemp + local LAST_ERR=/tmp/tmp.KI8sgF8MXN + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.uM4UdGlXXl namespace/pxc-operator created + cat /tmp/tmp.KI8sgF8MXN + rm /tmp/tmp.uM4UdGlXXl /tmp/tmp.KI8sgF8MXN + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.WMLpkmKpfP +++ mktemp ++ local LAST_ERR=/tmp/tmp.FXncH4Zs5k ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WMLpkmKpfP ++ cat /tmp/tmp.FXncH4Zs5k ++ rm /tmp/tmp.WMLpkmKpfP /tmp/tmp.FXncH4Zs5k ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1707-bb49e0ac-3-cluster7 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.ZnoY8VwljY ++ mktemp + local LAST_ERR=/tmp/tmp.9RRfvjfhpp + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1707-bb49e0ac-3-cluster7 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ZnoY8VwljY Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1707-bb49e0ac-3-cluster7" modified. 
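The block above is the harness's standard namespace churn: list every namespace, filter out system ones, feed the rest to "xargs kubectl delete ns", then delete and recreate the target namespace and repoint the kubeconfig context at it. The Forbidden error for the default namespace is expected noise from that sweep, most likely because the egrep pattern '^default$' matches a whole line while kubectl get ns prints extra columns, so "default" still reaches xargs and the API server refuses to delete it. A condensed sketch of the cycle (filters copied from the trace; the helper name is illustrative, and the log's own wait_for_delete polls instead of using kubectl wait):

    # sketch: drop leftover test namespaces and recreate the one we need
    cleanup_and_create_ns() {
        local ns=$1
        kubectl get ns \
            | egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' \
            | awk '{print $1}' \
            | xargs --no-run-if-empty kubectl delete ns || true
        kubectl delete namespace "$ns" || true
        kubectl wait --for=delete "namespace/$ns" --timeout=120s || true
        kubectl create namespace "$ns"
    }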
+ cat /tmp/tmp.9RRfvjfhpp + rm /tmp/tmp.ZnoY8VwljY /tmp/tmp.9RRfvjfhpp + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.g2tf8wXFv8 ++ mktemp + local LAST_ERR=/tmp/tmp.sZPHtVzRfd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.g2tf8wXFv8 customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.sZPHtVzRfd + rm /tmp/tmp.g2tf8wXFv8 /tmp/tmp.sZPHtVzRfd + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.tQ7i4w9Kb9 ++ mktemp + local LAST_ERR=/tmp/tmp.2kfBYbVFKV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.tQ7i4w9Kb9 clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.2kfBYbVFKV + rm /tmp/tmp.tQ7i4w9Kb9 /tmp/tmp.2kfBYbVFKV + return 0 + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1707-bb49e0ac^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/deploy/cw-operator.yaml + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + kubectl_bin apply -f - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - ++ mktemp + local LAST_OUT=/tmp/tmp.Z9lYhOFB8Q ++ mktemp + local LAST_ERR=/tmp/tmp.NNdH8EG2sv + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Z9lYhOFB8Q deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.NNdH8EG2sv + rm /tmp/tmp.Z9lYhOFB8Q /tmp/tmp.NNdH8EG2sv + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.x9ArPZMe8f ++ mktemp + local LAST_ERR=/tmp/tmp.0Lk9nWQuF2 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.x9ArPZMe8f pod/percona-xtradb-cluster-operator-bdf674667-kmvgs condition met + cat /tmp/tmp.0Lk9nWQuF2 + rm /tmp/tmp.x9ArPZMe8f /tmp/tmp.0Lk9nWQuF2 + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ grep -c percona-xtradb-cluster-operator +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.W4Klo8UNiO +++ mktemp ++ local LAST_ERR=/tmp/tmp.Db6zoywdFp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.W4Klo8UNiO ++ cat /tmp/tmp.Db6zoywdFp ++ rm /tmp/tmp.W4Klo8UNiO /tmp/tmp.Db6zoywdFp ++ return 0 + wait_pod percona-xtradb-cluster-operator-bdf674667-kmvgs 480 pxc-operator + local pod=percona-xtradb-cluster-operator-bdf674667-kmvgs + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-bdf674667-kmvgs ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-bdf674667-kmvgs condition met percona-xtradb-cluster-operator-bdf674667-kmvgs.Ok + sleep 3 + create_namespace users-26053 + local namespace=users-26053 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ sed s/NAMESPACE// ++ awk '-F ' '{print $2}' + local chaos_mesh_ns= + '[' -n '' ']' ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl get MutatingWebhookConfiguration + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + 
awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces users-26053' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-26053 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-26053 + kubectl_bin get ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.BFsLARk14f ++ mktemp + local LAST_OUT=/tmp/tmp.5qck5PyWs0 ++ mktemp + local LAST_ERR=/tmp/tmp.wcfDqUaqsY + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.CpcglGkgbK + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-26053 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BFsLARk14f + cat /tmp/tmp.wcfDqUaqsY + rm /tmp/tmp.BFsLARk14f /tmp/tmp.wcfDqUaqsY + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-26053 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-26053 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.5qck5PyWs0 + cat /tmp/tmp.CpcglGkgbK Error from server (NotFound): namespaces "users-26053" not found + rm /tmp/tmp.5qck5PyWs0 /tmp/tmp.CpcglGkgbK + return 1 + : + wait_for_delete namespace/users-26053 + local res=namespace/users-26053 + echo -n 'namespace/users-26053 - ' namespace/users-26053 - + set +o xtrace Error from server (NotFound): namespaces "users-26053" not found + desc 'create namespace users-26053' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-26053 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-26053 ++ mktemp + local LAST_OUT=/tmp/tmp.mW9ik7Z7Kg ++ mktemp + local LAST_ERR=/tmp/tmp.0Z0RDvEZn7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-26053 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.mW9ik7Z7Kg namespace/users-26053 created + cat /tmp/tmp.0Z0RDvEZn7 + rm /tmp/tmp.mW9ik7Z7Kg /tmp/tmp.0Z0RDvEZn7 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.V8QPYLU45n +++ mktemp ++ local LAST_ERR=/tmp/tmp.pQs3J1QJ3G ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.V8QPYLU45n ++ cat /tmp/tmp.pQs3J1QJ3G ++ rm /tmp/tmp.V8QPYLU45n /tmp/tmp.pQs3J1QJ3G ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1707-bb49e0ac-3-cluster7 --namespace=users-26053 ++ mktemp + local LAST_OUT=/tmp/tmp.GnwdXrDLyP ++ mktemp + local LAST_ERR=/tmp/tmp.atfvymyzHq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1707-bb49e0ac-3-cluster7 --namespace=users-26053 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.GnwdXrDLyP Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1707-bb49e0ac-3-cluster7" modified. 
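With the operator already running cluster-wide in pxc-operator, the same churn is repeated for the per-test namespace users-26053; the three failed "kubectl delete namespace" attempts (NotFound) are expected for a namespace that never existed and are tolerated by the ':' no-op after the wrapper returns 1. Stripped of the retry machinery, the net effect is just:

    # what the harness effectively did for the test namespace (context name taken from this log)
    kubectl create namespace users-26053
    kubectl config set-context \
        gke_cloud-dev-112233_us-central1-a_jen-pxc-1707-bb49e0ac-3-cluster7 \
        --namespace=users-26053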
+ cat /tmp/tmp.atfvymyzHq + rm /tmp/tmp.GnwdXrDLyP /tmp/tmp.atfvymyzHq + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.OcGE8y6kHy ++ mktemp + local LAST_ERR=/tmp/tmp.WB0CyuwPzk + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.OcGE8y6kHy secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.WB0CyuwPzk + rm /tmp/tmp.OcGE8y6kHy /tmp/tmp.WB0CyuwPzk + return 0 + desc 'create PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.wqsJFh96nQ ++ mktemp + local LAST_ERR=/tmp/tmp.9rPn38GK6N + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.wqsJFh96nQ secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.9rPn38GK6N + rm /tmp/tmp.wqsJFh96nQ /tmp/tmp.9rPn38GK6N + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1707-bb49e0ac#' + local LAST_OUT=/tmp/tmp.k2cDyw2z9k + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 
's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + local LAST_ERR=/tmp/tmp.ofWtEQ20CE + local exit_status=0 + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-26053~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.k2cDyw2z9k deployment.apps/pxc-client created + cat /tmp/tmp.ofWtEQ20CE + rm /tmp/tmp.k2cDyw2z9k /tmp/tmp.ofWtEQ20CE + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1707-bb49e0ac#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_OUT=/tmp/tmp.9UEHljuGh4 + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-26053~ + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ mktemp + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + local LAST_ERR=/tmp/tmp.JzqsYmuiOd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.9UEHljuGh4 perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.JzqsYmuiOd + rm /tmp/tmp.9UEHljuGh4 /tmp/tmp.JzqsYmuiOd + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.VHuxSDDGNW ++++ mktemp +++ local LAST_ERR=/tmp/tmp.SgH00tmYbA +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ 
exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.VHuxSDDGNW +++ cat /tmp/tmp.SgH00tmYbA +++ rm /tmp/tmp.VHuxSDDGNW /tmp/tmp.SgH00tmYbA +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.nLHeGYo02t ++++ mktemp +++ local LAST_ERR=/tmp/tmp.7TCxPJsjUH +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.nLHeGYo02t +++ cat /tmp/tmp.7TCxPJsjUH +++ rm /tmp/tmp.nLHeGYo02t /tmp/tmp.7TCxPJsjUH +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-26053 ++ mktemp + local LAST_OUT=/tmp/tmp.0iAg85m0Ji ++ mktemp + local LAST_ERR=/tmp/tmp.BIw18mpGzg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-26053 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-26053 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-26053 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.0iAg85m0Ji + cat /tmp/tmp.BIw18mpGzg error: no matching resources found + rm /tmp/tmp.0iAg85m0Ji /tmp/tmp.BIw18mpGzg + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met some-name-pxc-0.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 
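wait_for_running iterates over the expected pods (one ProxySQL pod here, then three PXC pods) and calls wait_pod for each; the "condition met" / "NAME.Ok" lines are its output. A rough reconstruction from the trace (the real helper loops up to max_retry and hides its body behind set +o xtrace, so the kubectl wait call is an assumption based on the "condition met" message):

    # sketch of the per-pod readiness wait (namespace handling omitted)
    wait_pod() {
        local pod=$1
        local timeout=${2:-480}
        # the harness derives the container name (pxc or proxysql) from the pod name
        local container
        container=$(echo "$pod" \
            | /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' \
            | egrep '^(pxc|proxysql)$' || true)
        kubectl wait --for=condition=Ready "pod/$pod" --timeout="${timeout}s" \
            && echo "${pod}.Ok"
    }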
+ local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4kt52plaG3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.RcYgQvgWRX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4kt52plaG3 ++ cat /tmp/tmp.RcYgQvgWRX ++ rm /tmp/tmp.4kt52plaG3 /tmp/tmp.RcYgQvgWRX ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-d92km ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BnPEl7dcyx +++ mktemp ++ local LAST_ERR=/tmp/tmp.JuZ3lFqPzg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BnPEl7dcyx ++ cat /tmp/tmp.JuZ3lFqPzg ++ rm /tmp/tmp.BnPEl7dcyx /tmp/tmp.JuZ3lFqPzg ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-d92km ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 
'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5ISlWfeWeT +++ mktemp ++ local LAST_ERR=/tmp/tmp.01hkLQwp8k ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5ISlWfeWeT ++ cat /tmp/tmp.01hkLQwp8k ++ rm /tmp/tmp.5ISlWfeWeT /tmp/tmp.01hkLQwp8k ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-d92km ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.Y95G4TXZdR/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-1.sql /tmp/tmp.Y95G4TXZdR/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cJJpywqzxa +++ mktemp ++ local LAST_ERR=/tmp/tmp.Dlxp6vA3ny ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cJJpywqzxa ++ cat /tmp/tmp.Dlxp6vA3ny ++ rm /tmp/tmp.cJJpywqzxa /tmp/tmp.Dlxp6vA3ny ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-d92km ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.Y95G4TXZdR/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-1.sql /tmp/tmp.Y95G4TXZdR/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MrNr8CMTvt +++ mktemp ++ local LAST_ERR=/tmp/tmp.S9Piucdrq5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MrNr8CMTvt ++ cat /tmp/tmp.S9Piucdrq5 ++ rm /tmp/tmp.MrNr8CMTvt /tmp/tmp.S9Piucdrq5 ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' ++ echo pxc-client-6644d8898f-d92km + local container= + set +o xtrace pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.Y95G4TXZdR/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-1.sql /tmp/tmp.Y95G4TXZdR/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IFnY2fk0rB +++ mktemp ++ local LAST_ERR=/tmp/tmp.z4R2ShDXzS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IFnY2fk0rB ++ cat /tmp/tmp.z4R2ShDXzS Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.IFnY2fk0rB /tmp/tmp.z4R2ShDXzS ++ return 0 + '[' '' ']' + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.yzntV4zwLY ++ mktemp + local LAST_ERR=/tmp/tmp.uAZGfaLqcU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.yzntV4zwLY secret/my-cluster-secrets patched + cat /tmp/tmp.uAZGfaLqcU + rm /tmp/tmp.yzntV4zwLY /tmp/tmp.uAZGfaLqcU + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GMZnxVBzbk +++ mktemp ++ local LAST_ERR=/tmp/tmp.knaRNWfrpJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GMZnxVBzbk ++ cat /tmp/tmp.knaRNWfrpJ ++ rm /tmp/tmp.GMZnxVBzbk /tmp/tmp.knaRNWfrpJ ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-d92km ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.Y95G4TXZdR/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.Y95G4TXZdR/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.5U3I9s2tCo ++ mktemp + local LAST_ERR=/tmp/tmp.oYBFlLxykw + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5U3I9s2tCo perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.oYBFlLxykw + rm /tmp/tmp.5U3I9s2tCo /tmp/tmp.oYBFlLxykw + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.N7gfuskn6m +++ mktemp ++ local LAST_ERR=/tmp/tmp.XjUqoT2gzO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.N7gfuskn6m ++ cat /tmp/tmp.XjUqoT2gzO ++ rm /tmp/tmp.N7gfuskn6m /tmp/tmp.XjUqoT2gzO ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TryZrg8so1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Y31YF02Ztt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TryZrg8so1 ++ cat /tmp/tmp.Y31YF02Ztt ++ rm /tmp/tmp.TryZrg8so1 /tmp/tmp.Y31YF02Ztt ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ponrkUiWcu ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.OKFdCTcemt +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ponrkUiWcu +++++ cat /tmp/tmp.OKFdCTcemt +++++ rm /tmp/tmp.ponrkUiWcu /tmp/tmp.OKFdCTcemt +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.6HggJXV2LR ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Qfzr4sxJNK +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.6HggJXV2LR +++++ cat /tmp/tmp.Qfzr4sxJNK +++++ rm /tmp/tmp.6HggJXV2LR /tmp/tmp.Qfzr4sxJNK +++++ return 0 ++++ [[ true == 
\t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VVQ20dZ1LH +++ mktemp ++ local LAST_ERR=/tmp/tmp.57yE4kwnmo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VVQ20dZ1LH ++ cat /tmp/tmp.57yE4kwnmo ++ rm /tmp/tmp.VVQ20dZ1LH /tmp/tmp.57yE4kwnmo ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.i5EGN9yjo2 ++ mktemp + local LAST_ERR=/tmp/tmp.1vUiVzfYhX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.i5EGN9yjo2 secret/my-cluster-secrets patched + cat /tmp/tmp.1vUiVzfYhX + rm /tmp/tmp.i5EGN9yjo2 /tmp/tmp.1vUiVzfYhX + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8kdA2amiGe +++ mktemp ++ local LAST_ERR=/tmp/tmp.1D4TFBV4nJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8kdA2amiGe ++ cat /tmp/tmp.1D4TFBV4nJ ++ rm /tmp/tmp.8kdA2amiGe /tmp/tmp.1D4TFBV4nJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YVHceFCsDQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.lVTsznA88i ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YVHceFCsDQ ++ cat /tmp/tmp.lVTsznA88i ++ rm /tmp/tmp.YVHceFCsDQ /tmp/tmp.lVTsznA88i ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1Ezg9IO3zC +++ mktemp ++ local LAST_ERR=/tmp/tmp.JacPhtTYwV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1Ezg9IO3zC ++ cat /tmp/tmp.JacPhtTYwV ++ rm /tmp/tmp.1Ezg9IO3zC /tmp/tmp.JacPhtTYwV ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local 
LAST_OUT=/tmp/tmp.FnUYV8NoPd ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.5YW3AemyQu +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.FnUYV8NoPd +++++ cat /tmp/tmp.5YW3AemyQu +++++ rm /tmp/tmp.FnUYV8NoPd /tmp/tmp.5YW3AemyQu +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.5qe30EF5or ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.yYXhz4Tmsp +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.5qe30EF5or +++++ cat /tmp/tmp.yYXhz4Tmsp +++++ rm /tmp/tmp.5qe30EF5or /tmp/tmp.yYXhz4Tmsp +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.f07GhmvXi5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.xyaIDuFG0O ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.f07GhmvXi5 ++ cat /tmp/tmp.xyaIDuFG0O ++ rm /tmp/tmp.f07GhmvXi5 /tmp/tmp.xyaIDuFG0O ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.Y95G4TXZdR/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-2.sql /tmp/tmp.Y95G4TXZdR/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.Y95G4TXZdR/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-2.sql /tmp/tmp.Y95G4TXZdR/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.Y95G4TXZdR/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-2.sql /tmp/tmp.Y95G4TXZdR/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.jtJoaAy8jq ++ mktemp + local LAST_ERR=/tmp/tmp.OWSSFGs8Dv + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jtJoaAy8jq perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.OWSSFGs8Dv + rm /tmp/tmp.jtJoaAy8jq /tmp/tmp.OWSSFGs8Dv + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.u6bAnFJt1b ++ mktemp + local LAST_ERR=/tmp/tmp.hNH4HxyEp0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.u6bAnFJt1b secret/my-cluster-secrets patched + cat /tmp/tmp.hNH4HxyEp0 + rm /tmp/tmp.u6bAnFJt1b /tmp/tmp.hNH4HxyEp0 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lr1Ey2nVwe +++ mktemp ++ local LAST_ERR=/tmp/tmp.lBtV7qLh39 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lr1Ey2nVwe ++ cat /tmp/tmp.lBtV7qLh39 ++ rm /tmp/tmp.lr1Ey2nVwe /tmp/tmp.lBtV7qLh39 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eoWv4llvA1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.IGImxnebKX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eoWv4llvA1 ++ cat /tmp/tmp.IGImxnebKX ++ rm /tmp/tmp.eoWv4llvA1 /tmp/tmp.IGImxnebKX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fkpbMAtH3s +++ mktemp ++ local LAST_ERR=/tmp/tmp.DMXFsIktAy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 
'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fkpbMAtH3s ++ cat /tmp/tmp.DMXFsIktAy ++ rm /tmp/tmp.fkpbMAtH3s /tmp/tmp.DMXFsIktAy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7JSZIMnAkz +++ mktemp ++ local LAST_ERR=/tmp/tmp.rL3bF94CWE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7JSZIMnAkz ++ cat /tmp/tmp.rL3bF94CWE ++ rm /tmp/tmp.7JSZIMnAkz /tmp/tmp.rL3bF94CWE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lYy9Dz17da +++ mktemp ++ local LAST_ERR=/tmp/tmp.Xi56Vlu9iQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lYy9Dz17da ++ cat /tmp/tmp.Xi56Vlu9iQ ++ rm /tmp/tmp.lYy9Dz17da /tmp/tmp.Xi56Vlu9iQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.59p8pa9ijB +++ mktemp ++ local LAST_ERR=/tmp/tmp.HWr8n6HYzb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.59p8pa9ijB ++ cat /tmp/tmp.HWr8n6HYzb ++ rm /tmp/tmp.59p8pa9ijB /tmp/tmp.HWr8n6HYzb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.b6TDkiyJAu +++ mktemp ++ local LAST_ERR=/tmp/tmp.CdYebaRnAb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.b6TDkiyJAu ++ cat /tmp/tmp.CdYebaRnAb ++ rm /tmp/tmp.b6TDkiyJAu /tmp/tmp.CdYebaRnAb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1nRzxYEXG7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.lvok1VWyOt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1nRzxYEXG7 ++ cat /tmp/tmp.lvok1VWyOt ++ rm /tmp/tmp.1nRzxYEXG7 /tmp/tmp.lvok1VWyOt ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QDHqMcU01a +++ mktemp ++ local LAST_ERR=/tmp/tmp.HZtHOGjMsL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e 
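Every "test <user>" block in this log follows the same recipe: base64-encode the new password, patch that single key in the my-cluster-secrets Secret, give the operator a moment, then poll the custom resource until it reports ready again with the expected pxc and proxysql sizes, and finally re-run the SQL checks with the new credential. The long run of "waiting for cluster readyness" lines here is that polling loop. A condensed sketch assembled from the trace (field paths and the 36-iteration cap are taken from wait_cluster_consistency as shown above; rotate_and_wait is an illustrative name, the log's own helpers are patch_secret and wait_cluster_consistency):

    # sketch: rotate one system-user password and wait for the cluster to settle
    rotate_and_wait() {
        local key=$1 newpass=$2 cluster=some-name
        local encoded i=0
        encoded=$(echo -n "$newpass" | base64)
        kubectl patch secret my-cluster-secrets \
            -p "{\"data\":{\"${key}\": \"${encoded}\"}}"
        sleep 15
        until [ "$(kubectl get pxc "$cluster" -o jsonpath='{.status.state}')" = "ready" ]; do
            echo 'waiting for cluster readyness'
            sleep 20
            i=$((i + 1))
            [ "$i" -ge 36 ] && return 1
        done
        # the harness then compares these counts against the expected sizes
        kubectl get pxc "$cluster" -o jsonpath='{.status.pxc.ready}'
        kubectl get pxc "$cluster" -o jsonpath='{.status.proxysql.ready}'
    }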
++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QDHqMcU01a ++ cat /tmp/tmp.HZtHOGjMsL ++ rm /tmp/tmp.QDHqMcU01a /tmp/tmp.HZtHOGjMsL ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.fG98zGtFDi ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.1GfioxnJPz +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.fG98zGtFDi +++++ cat /tmp/tmp.1GfioxnJPz +++++ rm /tmp/tmp.fG98zGtFDi /tmp/tmp.1GfioxnJPz +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.S0EBLoHxV4 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.lHAv4ZeIwQ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.S0EBLoHxV4 +++++ cat /tmp/tmp.lHAv4ZeIwQ +++++ rm /tmp/tmp.S0EBLoHxV4 /tmp/tmp.lHAv4ZeIwQ +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QGSoBTlC3q +++ mktemp ++ local LAST_ERR=/tmp/tmp.tzdhIAxfRc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QGSoBTlC3q ++ cat /tmp/tmp.tzdhIAxfRc ++ rm /tmp/tmp.QGSoBTlC3q /tmp/tmp.tzdhIAxfRc ++ return 0 + [[ 2 == \2 ]] + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-3-80.sql ']' + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' 
-s /tmp/tmp.Y95G4TXZdR/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-3.sql /tmp/tmp.Y95G4TXZdR/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Vdkb37stJE ++ mktemp + local LAST_ERR=/tmp/tmp.xAE13v8MKy + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Vdkb37stJE secret/my-cluster-secrets patched + cat /tmp/tmp.xAE13v8MKy + rm /tmp/tmp.Vdkb37stJE /tmp/tmp.xAE13v8MKy + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.28T6UkRj7z +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZVBHDUymv1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.28T6UkRj7z ++ cat /tmp/tmp.ZVBHDUymv1 ++ rm /tmp/tmp.28T6UkRj7z /tmp/tmp.ZVBHDUymv1 ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + retry=0 + is_password_updated monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep additional_password ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eTB5b9NuaW +++ mktemp ++ local LAST_ERR=/tmp/tmp.eUG7tbLm2M ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eTB5b9NuaW ++ cat /tmp/tmp.eUG7tbLm2M ++ rm /tmp/tmp.eTB5b9NuaW /tmp/tmp.eUG7tbLm2M ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-d92km ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace {"additional_password": "*C1F414D9BAF378B656A849B31F9F8AF3125F558B"} + retry=0 + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc 
-uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mYquw2LohY +++ mktemp ++ local LAST_ERR=/tmp/tmp.ELHkBUOX0j ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mYquw2LohY ++ cat /tmp/tmp.ELHkBUOX0j ++ rm /tmp/tmp.mYquw2LohY /tmp/tmp.ELHkBUOX0j ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-d92km ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 1 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4O2qqAJ5LE +++ mktemp ++ local LAST_ERR=/tmp/tmp.YN1EBl4pOM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4O2qqAJ5LE ++ cat /tmp/tmp.YN1EBl4pOM ++ rm /tmp/tmp.4O2qqAJ5LE /tmp/tmp.YN1EBl4pOM ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-d92km ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 2 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.t22VStLXei +++ mktemp ++ local LAST_ERR=/tmp/tmp.Dn5y7kjJmO ++ local 
exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.t22VStLXei ++ cat /tmp/tmp.Dn5y7kjJmO ++ rm /tmp/tmp.t22VStLXei /tmp/tmp.Dn5y7kjJmO ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-d92km ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 3 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ih79W3AsKI +++ mktemp ++ local LAST_ERR=/tmp/tmp.dWrwtH2GH7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ih79W3AsKI ++ cat /tmp/tmp.dWrwtH2GH7 ++ rm /tmp/tmp.ih79W3AsKI /tmp/tmp.dWrwtH2GH7 ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-d92km ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 4 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NEKXeRZZwQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.zfSbAfpxRo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NEKXeRZZwQ ++ cat /tmp/tmp.zfSbAfpxRo ++ rm /tmp/tmp.NEKXeRZZwQ /tmp/tmp.zfSbAfpxRo ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-d92km ++ /usr/bin/sed -E 
's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 5 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QauEmc9c00 +++ mktemp ++ local LAST_ERR=/tmp/tmp.d1OpTslgKZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QauEmc9c00 ++ cat /tmp/tmp.d1OpTslgKZ ++ rm /tmp/tmp.QauEmc9c00 /tmp/tmp.d1OpTslgKZ ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-d92km ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 6 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Wljfy8Nrgp +++ mktemp ++ local LAST_ERR=/tmp/tmp.PqUyN0Gpax ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Wljfy8Nrgp ++ cat /tmp/tmp.PqUyN0Gpax ++ rm /tmp/tmp.Wljfy8Nrgp /tmp/tmp.PqUyN0Gpax ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-d92km ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 7 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE 
user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Pc4uC2fVLl +++ mktemp ++ local LAST_ERR=/tmp/tmp.mKvrYUlcOB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Pc4uC2fVLl ++ cat /tmp/tmp.mKvrYUlcOB ++ rm /tmp/tmp.Pc4uC2fVLl /tmp/tmp.mKvrYUlcOB ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-d92km ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 8 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AmX7fmFuC2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.YzgFEOKO1G ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AmX7fmFuC2 ++ cat /tmp/tmp.YzgFEOKO1G ++ rm /tmp/tmp.AmX7fmFuC2 /tmp/tmp.YzgFEOKO1G ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-d92km ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 9 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JsPSDUfLVk +++ mktemp ++ local LAST_ERR=/tmp/tmp.mbvfaj1mEL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods 
--selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JsPSDUfLVk ++ cat /tmp/tmp.mbvfaj1mEL ++ rm /tmp/tmp.JsPSDUfLVk /tmp/tmp.mbvfaj1mEL ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-d92km ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 10 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.p694m0gebp +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZCt1UvbOwH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.p694m0gebp ++ cat /tmp/tmp.ZCt1UvbOwH ++ rm /tmp/tmp.p694m0gebp /tmp/tmp.ZCt1UvbOwH ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-d92km ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 11 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jfEugWzyPs +++ mktemp ++ local LAST_ERR=/tmp/tmp.L69kuawFXi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jfEugWzyPs ++ cat /tmp/tmp.L69kuawFXi ++ rm /tmp/tmp.jfEugWzyPs /tmp/tmp.L69kuawFXi ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-d92km ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace 
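# A hedged sketch of the propagation check driving the retries above, assuming the helper layout
# implied by the trace; run_mysql is the log's own helper (it executes the query from the
# pxc-client pod) and root_pass is the value read from my-cluster-secrets earlier in this step:
is_old_password_discarded() {
    local user=$1 uri=$2
    # once MySQL's dual-password attribute is dropped, User_attributes comes back NULL
    run_mysql "SELECT User_attributes FROM mysql.user WHERE user='${user}'" "$uri" | grep NULL
}
retry=0
until is_old_password_discarded monitor "-h some-name-pxc -uroot -p'${root_pass}'"; do
    echo 'waiting for password propagation'
    sleep 1
    let retry+=1
    [[ $retry -ge 240 ]] && exit 1
done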
pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 12 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.01vm4naqZw +++ mktemp ++ local LAST_ERR=/tmp/tmp.mF847aQ43w ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.01vm4naqZw ++ cat /tmp/tmp.mF847aQ43w ++ rm /tmp/tmp.01vm4naqZw /tmp/tmp.mF847aQ43w ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-d92km ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace NULL + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zgEYnuPzJg +++ mktemp ++ local LAST_ERR=/tmp/tmp.SKdnyyZ3k7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zgEYnuPzJg ++ cat /tmp/tmp.SKdnyyZ3k7 ++ rm /tmp/tmp.zgEYnuPzJg /tmp/tmp.SKdnyyZ3k7 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EfkUPNG1CF +++ mktemp ++ local LAST_ERR=/tmp/tmp.mS2cFsZ7uW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EfkUPNG1CF ++ cat /tmp/tmp.mS2cFsZ7uW ++ rm /tmp/tmp.EfkUPNG1CF /tmp/tmp.mS2cFsZ7uW ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.7fWkgtty49 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.g0IgMxfuUg +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.7fWkgtty49 +++++ cat 
/tmp/tmp.g0IgMxfuUg +++++ rm /tmp/tmp.7fWkgtty49 /tmp/tmp.g0IgMxfuUg +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.AQeLVWIChC ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.oj8L3buQHH +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.AQeLVWIChC +++++ cat /tmp/tmp.oj8L3buQHH +++++ rm /tmp/tmp.AQeLVWIChC /tmp/tmp.oj8L3buQHH +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NBBayuadC2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.mn00kGWO3T ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NBBayuadC2 ++ cat /tmp/tmp.mn00kGWO3T ++ rm /tmp/tmp.NBBayuadC2 /tmp/tmp.mn00kGWO3T ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KT4Y39q5iS +++ mktemp ++ local LAST_ERR=/tmp/tmp.nx2FzGve1o ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KT4Y39q5iS ++ cat /tmp/tmp.nx2FzGve1o ++ rm /tmp/tmp.KT4Y39q5iS /tmp/tmp.nx2FzGve1o ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-d92km ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.Y95G4TXZdR/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.Y95G4TXZdR/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.TJplQ920l4 ++ mktemp + local LAST_ERR=/tmp/tmp.6pUr0FRpxe + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.TJplQ920l4 secret/my-cluster-secrets patched + cat /tmp/tmp.6pUr0FRpxe + rm /tmp/tmp.TJplQ920l4 /tmp/tmp.6pUr0FRpxe + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7rnAvrbAyX +++ mktemp ++ local LAST_ERR=/tmp/tmp.2CbMvyp5Ik ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7rnAvrbAyX ++ cat /tmp/tmp.2CbMvyp5Ik ++ rm /tmp/tmp.7rnAvrbAyX /tmp/tmp.2CbMvyp5Ik ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GVLflZTbmD +++ mktemp ++ local LAST_ERR=/tmp/tmp.eSQbj5VhIL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GVLflZTbmD ++ cat /tmp/tmp.eSQbj5VhIL ++ rm /tmp/tmp.GVLflZTbmD /tmp/tmp.eSQbj5VhIL ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uDk9nEOPAW +++ mktemp ++ local LAST_ERR=/tmp/tmp.ftVZekWRwU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uDk9nEOPAW ++ cat /tmp/tmp.ftVZekWRwU ++ rm /tmp/tmp.uDk9nEOPAW /tmp/tmp.ftVZekWRwU ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.AAPR14NvkT ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.7mC5j0j88O +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ 
exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.AAPR14NvkT +++++ cat /tmp/tmp.7mC5j0j88O +++++ rm /tmp/tmp.AAPR14NvkT /tmp/tmp.7mC5j0j88O +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.eXTAFjEJ6M ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.7NfaTvv9e8 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.eXTAFjEJ6M +++++ cat /tmp/tmp.7NfaTvv9e8 +++++ rm /tmp/tmp.eXTAFjEJ6M /tmp/tmp.7NfaTvv9e8 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Lx6ZpDiDCL +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZFLCEDcqtF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Lx6ZpDiDCL ++ cat /tmp/tmp.ZFLCEDcqtF ++ rm /tmp/tmp.Lx6ZpDiDCL /tmp/tmp.ZFLCEDcqtF ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jc8PBajbwJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.pFpcU7TxYV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jc8PBajbwJ ++ cat /tmp/tmp.pFpcU7TxYV ++ rm /tmp/tmp.jc8PBajbwJ /tmp/tmp.pFpcU7TxYV ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-d92km ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.Y95G4TXZdR/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.Y95G4TXZdR/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.zk6XvGEFZ8 ++ mktemp + local LAST_ERR=/tmp/tmp.1E7F7UR6xY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zk6XvGEFZ8 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.1E7F7UR6xY + rm /tmp/tmp.zk6XvGEFZ8 /tmp/tmp.1E7F7UR6xY + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hwsoWv7QjI +++ mktemp ++ local LAST_ERR=/tmp/tmp.mVL1rqZzXL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hwsoWv7QjI ++ cat /tmp/tmp.mVL1rqZzXL ++ rm /tmp/tmp.hwsoWv7QjI /tmp/tmp.mVL1rqZzXL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bL2Z5Mg2Cl +++ mktemp ++ local LAST_ERR=/tmp/tmp.FC3aleVUGL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bL2Z5Mg2Cl ++ cat /tmp/tmp.FC3aleVUGL ++ rm /tmp/tmp.bL2Z5Mg2Cl /tmp/tmp.FC3aleVUGL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZORtZnejVL +++ mktemp ++ local LAST_ERR=/tmp/tmp.oQuOUmBKGO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZORtZnejVL ++ cat /tmp/tmp.oQuOUmBKGO ++ rm /tmp/tmp.ZORtZnejVL /tmp/tmp.oQuOUmBKGO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9VlyH2INFQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.liNWSlcMuF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ 
break ++ cat /tmp/tmp.9VlyH2INFQ ++ cat /tmp/tmp.liNWSlcMuF ++ rm /tmp/tmp.9VlyH2INFQ /tmp/tmp.liNWSlcMuF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hBvVFiTxoS +++ mktemp ++ local LAST_ERR=/tmp/tmp.qOB5wbrTrZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hBvVFiTxoS ++ cat /tmp/tmp.qOB5wbrTrZ ++ rm /tmp/tmp.hBvVFiTxoS /tmp/tmp.qOB5wbrTrZ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IfIKp4dY1g +++ mktemp ++ local LAST_ERR=/tmp/tmp.8597VzvLMD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IfIKp4dY1g ++ cat /tmp/tmp.8597VzvLMD ++ rm /tmp/tmp.IfIKp4dY1g /tmp/tmp.8597VzvLMD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZapZhZ45bN +++ mktemp ++ local LAST_ERR=/tmp/tmp.H0fhRi72L1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZapZhZ45bN ++ cat /tmp/tmp.H0fhRi72L1 ++ rm /tmp/tmp.ZapZhZ45bN /tmp/tmp.H0fhRi72L1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.suK8tVoncE +++ mktemp ++ local LAST_ERR=/tmp/tmp.vkpB08Mzoi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.suK8tVoncE ++ cat /tmp/tmp.vkpB08Mzoi ++ rm /tmp/tmp.suK8tVoncE /tmp/tmp.vkpB08Mzoi ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.c3X1V0CVGD +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZUOdqqqTje ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.c3X1V0CVGD ++ cat /tmp/tmp.ZUOdqqqTje ++ rm /tmp/tmp.c3X1V0CVGD /tmp/tmp.ZUOdqqqTje ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.NnNZK0bISi ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.rQHZ8eiXjE +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ 
break +++++ cat /tmp/tmp.NnNZK0bISi +++++ cat /tmp/tmp.rQHZ8eiXjE +++++ rm /tmp/tmp.NnNZK0bISi /tmp/tmp.rQHZ8eiXjE +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.c9qb4VVnQ8 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.QpU9pTqM8u +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.c9qb4VVnQ8 +++++ cat /tmp/tmp.QpU9pTqM8u +++++ rm /tmp/tmp.c9qb4VVnQ8 /tmp/tmp.QpU9pTqM8u +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.l1DT1nLuQh +++ mktemp ++ local LAST_ERR=/tmp/tmp.I0LW6QZqHF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.l1DT1nLuQh ++ cat /tmp/tmp.I0LW6QZqHF ++ rm /tmp/tmp.l1DT1nLuQh /tmp/tmp.I0LW6QZqHF ++ return 0 + [[ 2 == \2 ]] + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.hQtqWX76EN ++ mktemp + local LAST_ERR=/tmp/tmp.R6GgEcyLbG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hQtqWX76EN secret/my-cluster-secrets-2 patched + cat /tmp/tmp.R6GgEcyLbG + rm /tmp/tmp.hQtqWX76EN /tmp/tmp.R6GgEcyLbG + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RgrV51Y44a +++ mktemp ++ local LAST_ERR=/tmp/tmp.obpbzRbCG0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RgrV51Y44a ++ cat /tmp/tmp.obpbzRbCG0 ++ rm /tmp/tmp.RgrV51Y44a /tmp/tmp.obpbzRbCG0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gBBbvJU7Za +++ mktemp ++ local LAST_ERR=/tmp/tmp.XvNFrHtuno ++ local exit_status=0 
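# The patch_secret calls in this test follow the pattern visible in the trace (locals secret/key/value,
# then a JSON merge of .data); sketch below, noting the real helper wraps kubectl in a retry loop
# and callers pass a value that is already base64-encoded:
patch_secret() {
    local secret=$1 key=$2 value=$3   # value must already be base64-encoded
    kubectl patch secret "$secret" -p="{\"data\":{\"${key}\": \"${value}\"}}"
}
# usage, as in the 'test new operator' step above:
newpass=test-password2
patch_secret my-cluster-secrets-2 operator "$(echo -n "$newpass" | base64)"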
+++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gBBbvJU7Za ++ cat /tmp/tmp.XvNFrHtuno ++ rm /tmp/tmp.gBBbvJU7Za /tmp/tmp.XvNFrHtuno ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.G5ZXPEb5Fk +++ mktemp ++ local LAST_ERR=/tmp/tmp.Y207c6L8fd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.G5ZXPEb5Fk ++ cat /tmp/tmp.Y207c6L8fd ++ rm /tmp/tmp.G5ZXPEb5Fk /tmp/tmp.Y207c6L8fd ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.mqlfSAiEht ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.rP22hFxVRG +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.mqlfSAiEht +++++ cat /tmp/tmp.rP22hFxVRG +++++ rm /tmp/tmp.mqlfSAiEht /tmp/tmp.rP22hFxVRG +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.cX8EpsXxui ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.vN1sMPS8yE +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.cX8EpsXxui +++++ cat /tmp/tmp.vN1sMPS8yE +++++ rm /tmp/tmp.cX8EpsXxui /tmp/tmp.vN1sMPS8yE +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3ht2fiE94e +++ mktemp ++ local LAST_ERR=/tmp/tmp.ewsDDyOMv5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3ht2fiE94e ++ cat /tmp/tmp.ewsDDyOMv5 ++ rm /tmp/tmp.3ht2fiE94e /tmp/tmp.ewsDDyOMv5 ++ return 0 + [[ 2 == \2 ]] + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 
'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dOXNsAIhWr +++ mktemp ++ local LAST_ERR=/tmp/tmp.gRDomGQ2KW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dOXNsAIhWr ++ cat /tmp/tmp.gRDomGQ2KW ++ rm /tmp/tmp.dOXNsAIhWr /tmp/tmp.gRDomGQ2KW ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-d92km ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.Y95G4TXZdR/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.Y95G4TXZdR/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.uytoCCnQ0o +++ mktemp ++ local LAST_ERR=/tmp/tmp.Nc72YW2UQl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uytoCCnQ0o ++ cat /tmp/tmp.Nc72YW2UQl ++ rm /tmp/tmp.uytoCCnQ0o /tmp/tmp.Nc72YW2UQl ++ return 0 + newpass='(&-jwG3Z5qtVd1F^!' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''(&-jwG3Z5qtVd1F^!'\'';' '-h some-name-pxc -uroot -p'\''(&-jwG3Z5qtVd1F^!'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''(&-jwG3Z5qtVd1F^!'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''(&-jwG3Z5qtVd1F^!'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8RwA9D7KiI +++ mktemp ++ local LAST_ERR=/tmp/tmp.qhCzv9TdPF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8RwA9D7KiI ++ cat /tmp/tmp.qhCzv9TdPF ++ rm /tmp/tmp.8RwA9D7KiI /tmp/tmp.qhCzv9TdPF ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-d92km ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''(&-jwG3Z5qtVd1F^!'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''(&-jwG3Z5qtVd1F^!'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-4.sql + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''(&-jwG3Z5qtVd1F^!'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''(&-jwG3Z5qtVd1F^!'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5CDVsQuZma +++ mktemp ++ local LAST_ERR=/tmp/tmp.rCXB94Gbkz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5CDVsQuZma ++ cat /tmp/tmp.rCXB94Gbkz ++ rm /tmp/tmp.5CDVsQuZma /tmp/tmp.rCXB94Gbkz ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-d92km ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.Y95G4TXZdR/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.Y95G4TXZdR/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.1eUzsaWtWr +++ mktemp ++ local LAST_ERR=/tmp/tmp.SAxdPAhmpo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1eUzsaWtWr ++ cat /tmp/tmp.SAxdPAhmpo ++ rm /tmp/tmp.1eUzsaWtWr /tmp/tmp.SAxdPAhmpo ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.JTb7FM08dY ++ mktemp + local LAST_ERR=/tmp/tmp.Q72XPuBpR6 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.JTb7FM08dY secret/my-cluster-secrets-2 configured + cat /tmp/tmp.Q72XPuBpR6 Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
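# getSecretData, used above to read the generated root and operator passwords back out of the
# cluster secrets; a sketch based on the trace (kubectl --template plus base64 --decode), the
# actual helper sits in the shared test functions:
getSecretData() {
    local secretName=$1 dataKey=$2
    kubectl get "secrets/${secretName}" --template="{{.data.${dataKey}}}" | base64 --decode
}
# e.g. pass=$(getSecretData internal-some-name operator)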
+ rm /tmp/tmp.JTb7FM08dY /tmp/tmp.Q72XPuBpR6 + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6x1Jk81jAm +++ mktemp ++ local LAST_ERR=/tmp/tmp.9MnlVZYYFd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6x1Jk81jAm ++ cat /tmp/tmp.9MnlVZYYFd ++ rm /tmp/tmp.6x1Jk81jAm /tmp/tmp.9MnlVZYYFd ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-d92km ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.Y95G4TXZdR/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.Y95G4TXZdR/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.7pcRmGwgDa + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-26053~ ++ mktemp + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1707-bb49e0ac#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + local LAST_ERR=/tmp/tmp.JMtJ8oX3LR + local exit_status=0 ++ seq 0 2 + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7pcRmGwgDa perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.JMtJ8oX3LR + rm /tmp/tmp.7pcRmGwgDa /tmp/tmp.JMtJ8oX3LR + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gyZ3EwaL2n +++ mktemp ++ local LAST_ERR=/tmp/tmp.hTFpFr0v3d ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gyZ3EwaL2n ++ cat /tmp/tmp.hTFpFr0v3d ++ rm /tmp/tmp.gyZ3EwaL2n /tmp/tmp.hTFpFr0v3d ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3Y7L9nlNNB +++ mktemp ++ local LAST_ERR=/tmp/tmp.p3X3KlNYjq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3Y7L9nlNNB ++ cat /tmp/tmp.p3X3KlNYjq ++ rm /tmp/tmp.3Y7L9nlNNB /tmp/tmp.p3X3KlNYjq ++ return 0 + [[ 
initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.usrbqIpxdQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.tlnSv7e54x ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.usrbqIpxdQ ++ cat /tmp/tmp.tlnSv7e54x ++ rm /tmp/tmp.usrbqIpxdQ /tmp/tmp.tlnSv7e54x ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yRYXp0qha0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.KKjXzGU8KR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yRYXp0qha0 ++ cat /tmp/tmp.KKjXzGU8KR ++ rm /tmp/tmp.yRYXp0qha0 /tmp/tmp.KKjXzGU8KR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MNNecfskKX +++ mktemp ++ local LAST_ERR=/tmp/tmp.YLBg9WJlz8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MNNecfskKX ++ cat /tmp/tmp.YLBg9WJlz8 ++ rm /tmp/tmp.MNNecfskKX /tmp/tmp.YLBg9WJlz8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GSDxmNHZBX +++ mktemp ++ local LAST_ERR=/tmp/tmp.myHSiLlmEu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GSDxmNHZBX ++ cat /tmp/tmp.myHSiLlmEu ++ rm /tmp/tmp.GSDxmNHZBX /tmp/tmp.myHSiLlmEu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w3yMZHaZUI +++ mktemp ++ local LAST_ERR=/tmp/tmp.js7KIta0k5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.w3yMZHaZUI ++ cat /tmp/tmp.js7KIta0k5 ++ rm /tmp/tmp.w3yMZHaZUI /tmp/tmp.js7KIta0k5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lOgyjTHxUj +++ mktemp ++ local LAST_ERR=/tmp/tmp.LtYAKhGWlu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lOgyjTHxUj ++ cat /tmp/tmp.LtYAKhGWlu ++ rm 
/tmp/tmp.lOgyjTHxUj /tmp/tmp.LtYAKhGWlu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uMC5nlQuEQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.gt60xcrkIY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uMC5nlQuEQ ++ cat /tmp/tmp.gt60xcrkIY ++ rm /tmp/tmp.uMC5nlQuEQ /tmp/tmp.gt60xcrkIY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 8 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6TSEsxr05E +++ mktemp ++ local LAST_ERR=/tmp/tmp.9OXLuDI7tF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6TSEsxr05E ++ cat /tmp/tmp.9OXLuDI7tF ++ rm /tmp/tmp.6TSEsxr05E /tmp/tmp.9OXLuDI7tF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 9 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WOrEdG7aks +++ mktemp ++ local LAST_ERR=/tmp/tmp.TvUyWa6XxS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WOrEdG7aks ++ cat /tmp/tmp.TvUyWa6XxS ++ rm /tmp/tmp.WOrEdG7aks /tmp/tmp.TvUyWa6XxS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 10 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.imx7GaT2Ok +++ mktemp ++ local LAST_ERR=/tmp/tmp.PnjzMna97l ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.imx7GaT2Ok ++ cat /tmp/tmp.PnjzMna97l ++ rm /tmp/tmp.imx7GaT2Ok /tmp/tmp.PnjzMna97l ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 11 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Hd61oIaYDu +++ mktemp ++ local LAST_ERR=/tmp/tmp.loReg4vspY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Hd61oIaYDu ++ cat /tmp/tmp.loReg4vspY ++ rm /tmp/tmp.Hd61oIaYDu /tmp/tmp.loReg4vspY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 12 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.k28EaAiu1G +++ mktemp ++ local LAST_ERR=/tmp/tmp.YsiJXgUxyg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ 
cat /tmp/tmp.k28EaAiu1G ++ cat /tmp/tmp.YsiJXgUxyg ++ rm /tmp/tmp.k28EaAiu1G /tmp/tmp.YsiJXgUxyg ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yDvRXfilk5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.SUE0E1NLYa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yDvRXfilk5 ++ cat /tmp/tmp.SUE0E1NLYa ++ rm /tmp/tmp.yDvRXfilk5 /tmp/tmp.SUE0E1NLYa ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Z7gXc5IBzN ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.RDH0Rf5pSq +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Z7gXc5IBzN +++++ cat /tmp/tmp.RDH0Rf5pSq +++++ rm /tmp/tmp.Z7gXc5IBzN /tmp/tmp.RDH0Rf5pSq +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Dnns8XOOq2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ixpRSxFJDA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Dnns8XOOq2 ++ cat /tmp/tmp.ixpRSxFJDA ++ rm /tmp/tmp.Dnns8XOOq2 /tmp/tmp.ixpRSxFJDA ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.CnOKpw8EDR ++ mktemp + local LAST_ERR=/tmp/tmp.AQTOGWUGJu + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.CnOKpw8EDR secret/my-cluster-secrets patched + cat /tmp/tmp.AQTOGWUGJu + rm /tmp/tmp.CnOKpw8EDR /tmp/tmp.AQTOGWUGJu + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ISjmqmtd2n +++ mktemp ++ local LAST_ERR=/tmp/tmp.36DQ0SwZUy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ISjmqmtd2n ++ cat /tmp/tmp.36DQ0SwZUy ++ rm /tmp/tmp.ISjmqmtd2n /tmp/tmp.36DQ0SwZUy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for 
cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2sC2QQ5U37 +++ mktemp ++ local LAST_ERR=/tmp/tmp.JBnRT3m8nd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2sC2QQ5U37 ++ cat /tmp/tmp.JBnRT3m8nd ++ rm /tmp/tmp.2sC2QQ5U37 /tmp/tmp.JBnRT3m8nd ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hLCloAufUq +++ mktemp ++ local LAST_ERR=/tmp/tmp.3lpjxzRESg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hLCloAufUq ++ cat /tmp/tmp.3lpjxzRESg ++ rm /tmp/tmp.hLCloAufUq /tmp/tmp.3lpjxzRESg ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.W4HpWdGGCa +++ mktemp ++ local LAST_ERR=/tmp/tmp.pcv5iEF02b ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.W4HpWdGGCa ++ cat /tmp/tmp.pcv5iEF02b ++ rm /tmp/tmp.W4HpWdGGCa /tmp/tmp.pcv5iEF02b ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6wLYZJ60fU +++ mktemp ++ local LAST_ERR=/tmp/tmp.EwXWCTXzaB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6wLYZJ60fU ++ cat /tmp/tmp.EwXWCTXzaB ++ rm /tmp/tmp.6wLYZJ60fU /tmp/tmp.EwXWCTXzaB ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.o6lN0xLIsr ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.u5FVDbonOJ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.o6lN0xLIsr +++++ cat /tmp/tmp.u5FVDbonOJ +++++ rm /tmp/tmp.o6lN0xLIsr /tmp/tmp.u5FVDbonOJ +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VbFdNRy2gj +++ mktemp ++ local LAST_ERR=/tmp/tmp.sat2oZquFd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VbFdNRy2gj ++ cat /tmp/tmp.sat2oZquFd ++ rm /tmp/tmp.VbFdNRy2gj /tmp/tmp.sat2oZquFd ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h 
some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-3-80.sql ']' + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cs2FegLUc6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.2Qv1LOCxIx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cs2FegLUc6 ++ cat /tmp/tmp.2Qv1LOCxIx ++ rm /tmp/tmp.cs2FegLUc6 /tmp/tmp.2Qv1LOCxIx ++ return 0 + client_pod=pxc-client-6644d8898f-d92km + wait_pod pxc-client-6644d8898f-d92km + local pod=pxc-client-6644d8898f-d92km + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-d92km ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-d92km condition met pxc-client-6644d8898f-d92km.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.Y95G4TXZdR/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1707/e2e-tests/users/compare/select-3.sql /tmp/tmp.Y95G4TXZdR/select-3.sql + destroy users-26053 + local namespace=users-26053 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v 'the object has been modified' + grep -v level=info ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + sort -u + grep -v 'get backup status: Job.batch' + tee /tmp/tmp.Y95G4TXZdR/operator.log +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.hewpxiemwI +++ mktemp ++ local LAST_ERR=/tmp/tmp.n3VcgnFT4y ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hewpxiemwI ++ cat /tmp/tmp.n3VcgnFT4y ++ rm /tmp/tmp.hewpxiemwI /tmp/tmp.n3VcgnFT4y ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-bdf674667-kmvgs ++ mktemp + local LAST_OUT=/tmp/tmp.sbFVYIUcv7 ++ mktemp + local LAST_ERR=/tmp/tmp.qLOGdmwVnz + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator 
percona-xtradb-cluster-operator-bdf674667-kmvgs + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.sbFVYIUcv7 + cat /tmp/tmp.qLOGdmwVnz + rm /tmp/tmp.sbFVYIUcv7 /tmp/tmp.qLOGdmwVnz + return 0 2024-05-14T18:06:28.826Z INFO setup Manager starting up {"gitCommit": "bb49e0ace4c0c85d9e6eb6fc7243c481ddec0d0a", "gitBranch": "PR-1707-bb49e0ac", "buildTime": "2024-05-14T16:17:41Z", "goVersion": "go1.22.3", "os": "linux", "arch": "amd64"} 2024-05-14T18:06:28.826Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1300000"} 2024-05-14T18:06:28.827Z INFO setup Registering Components. 2024-05-14T18:06:35.023Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2024-05-14T18:06:35.027Z INFO setup Starting the Cmd. 2024-05-14T18:06:35.028Z INFO controller-runtime.certwatcher Starting certificate watcher 2024-05-14T18:06:35.028Z INFO controller-runtime.certwatcher Updated current TLS certificate 2024-05-14T18:06:35.028Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2024-05-14T18:06:35.028Z INFO controller-runtime.metrics Starting metrics server 2024-05-14T18:06:35.028Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2024-05-14T18:06:35.028Z INFO controller-runtime.webhook Starting webhook server 2024-05-14T18:06:35.028Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2024-05-14T18:06:35.129Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 2024-05-14T18:06:35.143Z DEBUG events percona-xtradb-cluster-operator-bdf674667-kmvgs_d9851237-60f6-4234-bd76-6e339d0d3101 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"752d8d5b-e831-472e-9113-6bcbb8407dbd","apiVersion":"coordination.k8s.io/v1","resourceVersion":"58164"}, "reason": "LeaderElection"} 2024-05-14T18:06:35.143Z INFO Starting Controller {"controller": "pxc-controller"} 2024-05-14T18:06:35.143Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: unknown type"} 2024-05-14T18:06:35.143Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2024-05-14T18:06:35.144Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2024-05-14T18:06:35.144Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2024-05-14T18:06:35.144Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: unknown type"} 2024-05-14T18:06:35.144Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: unknown type"} 2024-05-14T18:06:35.249Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2024-05-14T18:06:35.249Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2024-05-14T18:06:35.249Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2024-05-14T18:07:06.495Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "901c757c-fd26-46af-bd1f-db49de91ebbb", "version": "1.15.0"} 2024-05-14T18:08:26.533Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "5004680d-56c7-48d0-a16a-f574155f7c81", "user": "operator"} 2024-05-14T18:08:26.578Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", 
"reconcileID": "5004680d-56c7-48d0-a16a-f574155f7c81", "user": "monitor"} 2024-05-14T18:08:26.698Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "5004680d-56c7-48d0-a16a-f574155f7c81"} 2024-05-14T18:08:26.743Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "5004680d-56c7-48d0-a16a-f574155f7c81"} 2024-05-14T18:08:26.784Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "5004680d-56c7-48d0-a16a-f574155f7c81", "user": "xtrabackup"} 2024-05-14T18:08:26.852Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "5004680d-56c7-48d0-a16a-f574155f7c81"} 2024-05-14T18:08:26.894Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "5004680d-56c7-48d0-a16a-f574155f7c81", "user": "replication"} 2024-05-14T18:08:27.034Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "5004680d-56c7-48d0-a16a-f574155f7c81", "err": "get primary pxc pod: not found"} 2024-05-14T18:08:31.580Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "5cd2411f-1a62-4de9-9423-b76adab03b1c", "err": "get primary pxc pod: not found"} 2024-05-14T18:08:36.874Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "99575e50-bebf-473b-af8c-193ff14e6589", "err": "get primary pxc pod: not found"} 2024-05-14T18:08:42.088Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "23505200-511b-43ac-9c88-813afa1f6fbb", "err": "get primary pxc pod: not found"} 2024-05-14T18:10:49.905Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "81e6fc6d-eeed-4fab-96bf-b9ef458927b3", "user": "root"} 2024-05-14T18:10:50.220Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "81e6fc6d-eeed-4fab-96bf-b9ef458927b3", "new version": "8.0.36-28.1"} 2024-05-14T18:10:53.400Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "81e6fc6d-eeed-4fab-96bf-b9ef458927b3"} 2024-05-14T18:10:58.480Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "f9f3870b-5214-4d00-8f2d-0046553696b4"} 2024-05-14T18:11:03.786Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "8146e9bb-e836-4e53-bac9-41b71dcc24b7"} 2024-05-14T18:11:09.216Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "c15025ee-e434-4f80-96c4-14c38a9f86c9"} 2024-05-14T18:11:14.616Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "b2ad294e-8cf0-454d-9565-189cf0b75b83"} 2024-05-14T18:11:20.073Z DEBUG PXC 
users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "766cc761-6eaa-4417-952f-bc6d599959d9"} 2024-05-14T18:11:25.415Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "13815f19-2a40-441d-82a4-2fc604cd4730"} 2024-05-14T18:11:31.012Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "fb3f25fc-1035-4629-9a33-2cb3869b3402"} 2024-05-14T18:11:36.577Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "823c4e02-5d92-47ef-8f62-6b68e3fd4144"} 2024-05-14T18:11:41.772Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "5704e822-6967-4cab-8712-0175b46cc9ae"} 2024-05-14T18:11:47.023Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "e2617a08-849c-4131-9973-591f024aaa9f"} 2024-05-14T18:11:52.674Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "3abe2713-816b-4490-9c80-6465c82da20c"} 2024-05-14T18:11:58.199Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "78ec07bf-7b5d-42f9-a94c-b7fc9bf9e8a0"} 2024-05-14T18:12:03.568Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "91c3f9a6-f2aa-4ba8-9b4b-48fa174a971d"} 2024-05-14T18:12:05.603Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "2ca03d72-b8a1-484d-ae3b-a92696f40740", "user": "root"} 2024-05-14T18:12:05.661Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "2ca03d72-b8a1-484d-ae3b-a92696f40740", "user": "root"} 2024-05-14T18:12:05.671Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "2ca03d72-b8a1-484d-ae3b-a92696f40740", "secret": "some-name-mysql-init", "user": "root"} 2024-05-14T18:12:10.335Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "2ca03d72-b8a1-484d-ae3b-a92696f40740"} 2024-05-14T18:12:10.350Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "2ca03d72-b8a1-484d-ae3b-a92696f40740", "user": "root"} 2024-05-14T18:12:10.402Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "2ca03d72-b8a1-484d-ae3b-a92696f40740", "user": "root"} 2024-05-14T18:12:13.824Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "2ca03d72-b8a1-484d-ae3b-a92696f40740"} 2024-05-14T18:12:19.275Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "dc8fb7e2-4015-4bdd-9dfc-487f9bb6ea7e"} 2024-05-14T18:12:24.849Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", 
"reconcileID": "718928a8-a682-427b-bdaa-bc1f76841276", "error": "exec syncusers: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: pod some-name-proxysql-2 does not have a host assigned / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T18:12:47.137Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "49a32d1b-5ece-49cd-81ca-78577573507d", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T18:12:48.157Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "4ef5e10b-3c06-4241-9214-5a5ffc4ac1cf", "user": "proxyadmin"} 2024-05-14T18:12:48.157Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "4ef5e10b-3c06-4241-9214-5a5ffc4ac1cf", "user": "proxyadmin"} 2024-05-14T18:12:48.227Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "4ef5e10b-3c06-4241-9214-5a5ffc4ac1cf", "user": "proxyadmin"} 2024-05-14T18:12:48.241Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "4ef5e10b-3c06-4241-9214-5a5ffc4ac1cf", "user": "proxyadmin"} 2024-05-14T18:12:48.241Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "4ef5e10b-3c06-4241-9214-5a5ffc4ac1cf", "last-applied-secret": "1a55867d0f4fa0d0643653b2eb57b2f690315d8d843a10599c9839c2a779a95f"} 2024-05-14T18:12:48.517Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "4ef5e10b-3c06-4241-9214-5a5ffc4ac1cf", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T18:13:32.129Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "28535708-cfff-426f-bbfb-8410efaa579c"} 2024-05-14T18:13:37.915Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "9cd1febb-eb69-4da5-9b1c-a4826a7ffa23"} 2024-05-14T18:13:43.278Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "5eb4d292-1bf7-4c0d-a61a-794998d73066"} 2024-05-14T18:13:46.264Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "81127906-4222-43f2-a6d5-096f037c741b", "user": "xtrabackup"} 2024-05-14T18:13:46.306Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "81127906-4222-43f2-a6d5-096f037c741b", "user": "xtrabackup"} 2024-05-14T18:13:46.316Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "81127906-4222-43f2-a6d5-096f037c741b", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-05-14T18:13:46.328Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "81127906-4222-43f2-a6d5-096f037c741b", "user": "xtrabackup"} 2024-05-14T18:13:46.360Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "81127906-4222-43f2-a6d5-096f037c741b", "user": "xtrabackup"} 2024-05-14T18:13:46.372Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "81127906-4222-43f2-a6d5-096f037c741b", "last-applied-secret": "d11bcc70ca53ea410cd455e13d82e7225917b0861d9b677ed34c02c5f6fbaf1b"} 2024-05-14T18:13:51.777Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "81127906-4222-43f2-a6d5-096f037c741b"} 2024-05-14T18:14:44.159Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "599c3ba5-2f0f-45f8-bf7a-2174de8d2f18", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-26053 on 10.57.176.10:53: no such host"} 
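The "Password changed, updating user" / "Old password discarded" entries above are driven by the test patching the corresponding key in the my-cluster-secrets Secret; the operator then rotates the MySQL account and restarts the affected pods, which lines up with the transient "no such host" lookups while some-name-pxc-1 is recreated. A hedged sketch of that patch step, reconstructed from the patch_secret trace earlier in this log (values taken from the log; any other user key is patched the same way):

# Rotate the monitor password the way the test does above.
newpass='test-password2'
newpassencrypted="$(echo -n "${newpass}" | base64)"   # dGVzdC1wYXNzd29yZDI=
kubectl patch secret my-cluster-secrets \
    -p="{\"data\":{\"monitor\": \"${newpassencrypted}\"}}"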
2024-05-14T18:14:49.118Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "db245556-70cb-441a-acb5-213e88d206d0", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-26053 on 10.57.176.10:53: no such host"} 2024-05-14T18:14:54.481Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "2127c9c2-a8e6-4786-b305-157fb3ac04b8", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-26053 on 10.57.176.10:53: no such host"} 2024-05-14T18:15:43.237Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "8727a6e5-3321-4611-ac26-54fe1993817a", "primary name": "some-name-pxc-0.some-name-pxc.users-26053.svc.cluster.local"} 2024-05-14T18:15:43.466Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "5a8dcd99-6ff0-4b92-8849-06173d81c310", "primary name": "some-name-pxc-0.some-name-pxc.users-26053.svc.cluster.local"} 2024-05-14T18:15:48.470Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "97260ada-144b-4153-87fb-2702c721e00a", "primary name": "some-name-pxc-0.some-name-pxc.users-26053.svc.cluster.local"} 2024-05-14T18:15:53.636Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "8dedc886-ca26-47fc-88b9-02612046b4fa", "primary name": "some-name-pxc-0.some-name-pxc.users-26053.svc.cluster.local"} 2024-05-14T18:15:58.814Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "f5e30234-4192-4475-a764-e44d0b62d222", "primary name": "some-name-pxc-0.some-name-pxc.users-26053.svc.cluster.local"} 2024-05-14T18:16:04.006Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "05d7bbf4-a15e-4a3d-8a5a-494cbd6c7ba2", "primary name": "some-name-pxc-0.some-name-pxc.users-26053.svc.cluster.local"} 2024-05-14T18:16:09.188Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "5cf19757-4e08-4905-b5d1-4699a0b15e38", "primary name": "some-name-pxc-0.some-name-pxc.users-26053.svc.cluster.local"} 2024-05-14T18:16:14.402Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "b5d3a290-4ed4-48b8-a09b-c44ea2c615eb", "primary name": "some-name-pxc-0.some-name-pxc.users-26053.svc.cluster.local"} 2024-05-14T18:16:19.646Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "b8cdf3e7-c80e-44f3-a96e-49ad28209536", "primary name": "some-name-pxc-0.some-name-pxc.users-26053.svc.cluster.local"} 2024-05-14T18:16:29.268Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "24cdcc37-f820-4cab-b2d9-eac64ec20526"} 2024-05-14T18:16:34.084Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "a4aadfef-4ec3-4ebb-860e-a9e42797d85c"} 2024-05-14T18:16:36.203Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "d17acfbc-90ea-46d9-95e3-07dc7f8e1e9e", "user": "monitor"} 2024-05-14T18:16:36.238Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "d17acfbc-90ea-46d9-95e3-07dc7f8e1e9e", "user": "monitor"} 2024-05-14T18:16:36.246Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "d17acfbc-90ea-46d9-95e3-07dc7f8e1e9e", "secret": "some-name-mysql-init", "user": "monitor"} 2024-05-14T18:16:36.293Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "d17acfbc-90ea-46d9-95e3-07dc7f8e1e9e", "user": "monitor"} 2024-05-14T18:16:36.304Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "d17acfbc-90ea-46d9-95e3-07dc7f8e1e9e", "user": "monitor"} 2024-05-14T18:16:36.419Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "d17acfbc-90ea-46d9-95e3-07dc7f8e1e9e", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-05-14T18:16:39.083Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "d17acfbc-90ea-46d9-95e3-07dc7f8e1e9e", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T18:17:32.593Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "f1f3b863-97fe-4df7-ad2c-14522b0777f6", "user": "monitor"} 2024-05-14T18:17:36.134Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "f1f3b863-97fe-4df7-ad2c-14522b0777f6"} 2024-05-14T18:17:37.633Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": 
"cbc534ee-c278-48e9-85c6-aadeb7ce0d4c", "user": "monitor"} 2024-05-14T18:17:41.765Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "cbc534ee-c278-48e9-85c6-aadeb7ce0d4c"} 2024-05-14T18:17:43.207Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "06612390-0cfc-40e2-b523-14e1eb473b3c", "user": "monitor"} 2024-05-14T18:17:43.537Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "06612390-0cfc-40e2-b523-14e1eb473b3c", "user": "monitor"} 2024-05-14T18:17:43.566Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "06612390-0cfc-40e2-b523-14e1eb473b3c", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-05-14T18:17:47.133Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "06612390-0cfc-40e2-b523-14e1eb473b3c"} 2024-05-14T18:17:52.923Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "2af297d8-fecd-450a-8482-899883343df2"} 2024-05-14T18:17:58.432Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "39ab3e79-35f5-4d7b-92c3-022b2752b35d"} 2024-05-14T18:18:03.456Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "7dd48629-a43f-4afd-a089-d3cfae87dbcc"} 2024-05-14T18:18:08.933Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "1ff59733-6323-44fd-bfe5-7e9d4c6932c9"} 2024-05-14T18:18:14.255Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "476497b1-a4e9-46af-999c-6483faf55420"} 2024-05-14T18:18:16.279Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "f9b6fa14-9f39-4d08-a7ee-fd8ba9a0a63f", "user": "operator"} 2024-05-14T18:18:16.311Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "f9b6fa14-9f39-4d08-a7ee-fd8ba9a0a63f", "user": "operator"} 2024-05-14T18:18:16.322Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "f9b6fa14-9f39-4d08-a7ee-fd8ba9a0a63f", "secret": "some-name-mysql-init", "user": "operator"} 2024-05-14T18:18:16.332Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "f9b6fa14-9f39-4d08-a7ee-fd8ba9a0a63f", "user": "operator"} 2024-05-14T18:18:16.366Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "f9b6fa14-9f39-4d08-a7ee-fd8ba9a0a63f", "user": "operator"} 2024-05-14T18:18:16.412Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "f9b6fa14-9f39-4d08-a7ee-fd8ba9a0a63f", "last-applied-secret": 
"6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-05-14T18:18:17.886Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "f9b6fa14-9f39-4d08-a7ee-fd8ba9a0a63f", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-26053.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-26053.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-26053.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-26053.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-26053.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-26053.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T18:18:47.897Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "e155e886-4acd-4a08-be96-60097aaaa67f"} 2024-05-14T18:18:56.329Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "b9b73f5c-ba41-41fe-8078-5889f4e450fe"} 2024-05-14T18:19:00.941Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "1c057f88-c7ac-40b8-b86b-11a0ab8137de"} 2024-05-14T18:19:06.437Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "4a7db3b0-cbf7-4fe5-b8fd-53be96f1dd49"} 2024-05-14T18:19:11.928Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "1d5c81bc-ae61-44c6-9a1a-a508127373c4"} 2024-05-14T18:19:15.401Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "secrets": "my-cluster-secrets-2"} 2024-05-14T18:19:15.420Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "user": "root"} 2024-05-14T18:19:15.464Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "user": "root"} 2024-05-14T18:19:15.473Z 
INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "secret": "some-name-mysql-init", "user": "root"} 2024-05-14T18:19:18.066Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "e24ac876-ff55-4b76-a58e-7a3a8ec213df"} 2024-05-14T18:19:20.029Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d"} 2024-05-14T18:19:20.045Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "user": "root"} 2024-05-14T18:19:20.091Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "user": "root"} 2024-05-14T18:19:20.114Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "user": "operator"} 2024-05-14T18:19:20.147Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "user": "operator"} 2024-05-14T18:19:20.155Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "secret": "some-name-mysql-init", "user": "operator"} 2024-05-14T18:19:20.168Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "user": "operator"} 2024-05-14T18:19:20.207Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "user": "operator"} 2024-05-14T18:19:20.227Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "user": "monitor"} 2024-05-14T18:19:20.261Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "user": "monitor"} 2024-05-14T18:19:20.272Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "secret": "some-name-mysql-init", "user": "monitor"} 2024-05-14T18:19:20.321Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "user": "monitor"} 2024-05-14T18:19:20.331Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "user": "monitor"} 2024-05-14T18:19:20.423Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "user": "xtrabackup"} 2024-05-14T18:19:20.456Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", 
"reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "user": "xtrabackup"} 2024-05-14T18:19:20.464Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-05-14T18:19:20.477Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "user": "xtrabackup"} 2024-05-14T18:19:20.514Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "user": "xtrabackup"} 2024-05-14T18:19:20.535Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "user": "replication"} 2024-05-14T18:19:20.566Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "user": "replication"} 2024-05-14T18:19:20.574Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "secret": "some-name-mysql-init", "user": "replication"} 2024-05-14T18:19:20.584Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "user": "replication"} 2024-05-14T18:19:20.620Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "user": "replication"} 2024-05-14T18:19:20.620Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "user": "proxyadmin"} 2024-05-14T18:19:20.732Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "user": "proxyadmin"} 2024-05-14T18:19:20.762Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "user": "proxyadmin"} 2024-05-14T18:19:20.762Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "last-applied-secret": "8990685ac869ef868942880b48fd1c41df6e1c2aa4ec2628ed6feb0163ca558a"} 2024-05-14T18:19:20.762Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "last-applied-secret": "8990685ac869ef868942880b48fd1c41df6e1c2aa4ec2628ed6feb0163ca558a"} 2024-05-14T18:19:21.122Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "25e5db49-28bb-4929-a285-f24253d9e69d", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T18:19:21.507Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "c40df436-b574-4e8d-a1a9-22ad7fa6700f", "user": "monitor"} 2024-05-14T18:19:21.807Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "c40df436-b574-4e8d-a1a9-22ad7fa6700f", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T18:20:46.156Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "b0299468-5d9e-401f-825c-ff8a0ba3b74c", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.12.241.8:33062: i/o timeout"} 2024-05-14T18:21:32.810Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "631548d2-0497-4e88-9767-03735b71eb46", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.12.242.27:33062: i/o timeout"} 2024-05-14T18:21:43.229Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "79d3bb30-2047-41ae-8177-0e8727aeeb4a", "primary name": "some-name-pxc-0.some-name-pxc.users-26053.svc.cluster.local"} 2024-05-14T18:21:53.637Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "03dd56da-f639-4f74-b2ed-db2122bf01a6", "primary name": "some-name-pxc-0.some-name-pxc.users-26053.svc.cluster.local"} 2024-05-14T18:21:59.146Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "de78789a-e1e5-48f0-989c-9eca0fced03b", "user": "monitor"} 2024-05-14T18:21:59.438Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "de78789a-e1e5-48f0-989c-9eca0fced03b", "user": "monitor"} 2024-05-14T18:21:59.470Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "de78789a-e1e5-48f0-989c-9eca0fced03b", "last-applied-secret": "8990685ac869ef868942880b48fd1c41df6e1c2aa4ec2628ed6feb0163ca558a"} 2024-05-14T18:22:02.858Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "de78789a-e1e5-48f0-989c-9eca0fced03b"} 2024-05-14T18:22:07.618Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "bd23a450-bdfc-4bac-9608-f697b4d58adb"} 2024-05-14T18:22:13.522Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "5c374e81-c056-44e9-a95e-aa4e7b5fd92f"} 2024-05-14T18:22:19.035Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "4c9a1bd2-6efb-4307-b75e-2af4c69c76c0"} 2024-05-14T18:22:24.601Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "dc8347c1-5f37-4a8d-87cf-b7eeffdc436f"} 2024-05-14T18:22:26.436Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "2f1249e9-4113-4953-a65b-3b1c74bc8394", "user": "operator"} 2024-05-14T18:22:26.467Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "2f1249e9-4113-4953-a65b-3b1c74bc8394", "user": "operator"} 2024-05-14T18:22:26.476Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "2f1249e9-4113-4953-a65b-3b1c74bc8394", "secret": "some-name-mysql-init", "user": "operator"} 2024-05-14T18:22:26.487Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "2f1249e9-4113-4953-a65b-3b1c74bc8394", "user": "operator"} 2024-05-14T18:22:26.519Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "2f1249e9-4113-4953-a65b-3b1c74bc8394", "user": "operator"} 2024-05-14T18:22:26.563Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": 
"2f1249e9-4113-4953-a65b-3b1c74bc8394", "last-applied-secret": "d1bfc315130c6130f865446cdbd940798df26c4179411dd459225d0db43af61a"} 2024-05-14T18:22:27.870Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "2f1249e9-4113-4953-a65b-3b1c74bc8394", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-26053.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-26053.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-26053.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-26053.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-26053.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-26053.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T18:23:10.125Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "1cca0512-2e26-43cd-b437-fbf322579420"} 2024-05-14T18:23:13.747Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "02298160-fa9e-489e-b093-225978b5012e"} 2024-05-14T18:23:19.538Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "3b9d105e-1d4b-444f-b133-af50fb9620ef"} 2024-05-14T18:23:24.849Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "2b430710-a6ce-4016-99bb-d993022b325c"} 2024-05-14T18:23:30.050Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "77e189b8-ed40-4cd1-b58e-409b78274ff8"} 2024-05-14T18:23:35.349Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "1f70a7ab-b5ac-4c45-b071-e1b819d5a89c"} 2024-05-14T18:23:42.766Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "ea3a62a2-da7e-4c1b-bc1b-74bceb9e74ba"} 2024-05-14T18:23:46.266Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "bae76a46-ade7-4866-8e61-98596beb83e9"} 
2024-05-14T18:23:51.733Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "ac2127c9-120e-4860-8115-37e10e9f1f28"} 2024-05-14T18:23:57.159Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "a6be7d2a-9c7b-4c59-b80f-42fe1294ad78"} 2024-05-14T18:24:02.564Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "8be2e45a-6e0e-485c-b8e3-fc3364ec7ceb"} 2024-05-14T18:24:07.973Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "5612099b-47f8-4e4c-a2e9-9ae951d2c241"} 2024-05-14T18:24:13.855Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "b7419595-04fe-49ce-9e0c-7c3a0b5de9f7"} 2024-05-14T18:24:19.451Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "4a467e63-c13c-47e2-8c61-dac8811bbd07"} 2024-05-14T18:24:24.800Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "6b960485-59ac-4055-8207-5056c120a48e"} 2024-05-14T18:24:26.607Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "93987f5b-aa80-4e8c-aef0-c59b5d786e03", "user": "root"} 2024-05-14T18:24:26.649Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "93987f5b-aa80-4e8c-aef0-c59b5d786e03", "user": "root"} 2024-05-14T18:24:26.659Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "93987f5b-aa80-4e8c-aef0-c59b5d786e03", "secret": "some-name-mysql-init", "user": "root"} 2024-05-14T18:24:32.014Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "93987f5b-aa80-4e8c-aef0-c59b5d786e03"} 2024-05-14T18:24:32.024Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "93987f5b-aa80-4e8c-aef0-c59b5d786e03", "user": "root"} 2024-05-14T18:24:32.076Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "93987f5b-aa80-4e8c-aef0-c59b5d786e03", "user": "root"} 2024-05-14T18:24:32.108Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "93987f5b-aa80-4e8c-aef0-c59b5d786e03", "user": "monitor"} 2024-05-14T18:24:32.146Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "93987f5b-aa80-4e8c-aef0-c59b5d786e03", "user": "monitor"} 2024-05-14T18:24:32.156Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "93987f5b-aa80-4e8c-aef0-c59b5d786e03", "secret": "some-name-mysql-init", "user": "monitor"} 2024-05-14T18:24:32.205Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "93987f5b-aa80-4e8c-aef0-c59b5d786e03", "user": "monitor"} 
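Each "Password changed, updating user" -> "Password updated" -> "MySQL init secret updated" -> "Internal secrets updated" -> "Old password discarded" sequence above is the operator reconciling a password that the test changed in the cluster's user Secrets object. A minimal sketch of triggering one such rotation by hand, assuming the user-facing secret is named some-name-secrets (the log only shows the derived some-name-mysql-init secret, so the name is an assumption; check spec.secretsName on the pxc resource):

    NAMESPACE=users-26053
    SECRET=some-name-secrets                      # assumption, see spec.secretsName
    NEW_PASS=$(openssl rand -base64 18 | tr -d '=+/')
    kubectl -n "$NAMESPACE" patch secret "$SECRET" --type=merge \
      -p "{\"stringData\":{\"monitor\":\"${NEW_PASS}\"}}"
    # The operator then logs a sweep like the one above, bumps last-applied-secret,
    # and rolls the affected pods; wait for the cluster to settle before verifying.
    kubectl -n "$NAMESPACE" wait pxc/some-name \
      --for=jsonpath='{.status.state}'=ready --timeout=600s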
2024-05-14T18:24:32.216Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "93987f5b-aa80-4e8c-aef0-c59b5d786e03", "user": "monitor"} 2024-05-14T18:24:32.305Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "93987f5b-aa80-4e8c-aef0-c59b5d786e03", "user": "xtrabackup"} 2024-05-14T18:24:32.340Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "93987f5b-aa80-4e8c-aef0-c59b5d786e03", "user": "xtrabackup"} 2024-05-14T18:24:32.351Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "93987f5b-aa80-4e8c-aef0-c59b5d786e03", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-05-14T18:24:32.363Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "93987f5b-aa80-4e8c-aef0-c59b5d786e03", "user": "xtrabackup"} 2024-05-14T18:24:32.395Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "93987f5b-aa80-4e8c-aef0-c59b5d786e03", "user": "xtrabackup"} 2024-05-14T18:24:32.416Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "93987f5b-aa80-4e8c-aef0-c59b5d786e03", "user": "proxyadmin"} 2024-05-14T18:24:32.466Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "93987f5b-aa80-4e8c-aef0-c59b5d786e03", "user": "proxyadmin"} 2024-05-14T18:24:32.481Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "93987f5b-aa80-4e8c-aef0-c59b5d786e03", "user": "proxyadmin"} 2024-05-14T18:24:32.481Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "93987f5b-aa80-4e8c-aef0-c59b5d786e03", "last-applied-secret": "7caef568cc5dfd37cdcc4652527f38de73f40ded73f143138aba7e854d77945c"} 2024-05-14T18:24:32.481Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "93987f5b-aa80-4e8c-aef0-c59b5d786e03", "last-applied-secret": "7caef568cc5dfd37cdcc4652527f38de73f40ded73f143138aba7e854d77945c"} 2024-05-14T18:24:32.746Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "93987f5b-aa80-4e8c-aef0-c59b5d786e03", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T18:24:45.991Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: a7f30188-532e-4e62-b013-591dc0e311d1 2024-05-14T18:26:30.892Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "4276551f-29c5-400c-a016-864d0a89057c", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-26053 on 10.57.176.10:53: no such host"} 2024-05-14T18:27:07.905Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "eb14ab1a-0389-4a78-9cfc-4c88f3e4c083", "user": "root"} 2024-05-14T18:27:07.947Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "eb14ab1a-0389-4a78-9cfc-4c88f3e4c083", "user": "root"} 2024-05-14T18:27:07.958Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "eb14ab1a-0389-4a78-9cfc-4c88f3e4c083", "secret": "some-name-mysql-init", "user": "root"} 2024-05-14T18:27:07.966Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "eb14ab1a-0389-4a78-9cfc-4c88f3e4c083", "user": "root"} 2024-05-14T18:27:08.018Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "eb14ab1a-0389-4a78-9cfc-4c88f3e4c083", "user": "root"} 2024-05-14T18:27:08.036Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "eb14ab1a-0389-4a78-9cfc-4c88f3e4c083", "user": "operator"} 2024-05-14T18:27:08.070Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "eb14ab1a-0389-4a78-9cfc-4c88f3e4c083", "user": "operator"} 2024-05-14T18:27:08.078Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "eb14ab1a-0389-4a78-9cfc-4c88f3e4c083", "secret": "some-name-mysql-init", "user": "operator"} 2024-05-14T18:27:08.088Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "eb14ab1a-0389-4a78-9cfc-4c88f3e4c083", "user": "operator"} 2024-05-14T18:27:08.119Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "eb14ab1a-0389-4a78-9cfc-4c88f3e4c083", "user": "operator"} 2024-05-14T18:27:08.132Z INFO Password changed, updating user {"controller": 
"pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "eb14ab1a-0389-4a78-9cfc-4c88f3e4c083", "user": "monitor"} 2024-05-14T18:27:08.163Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "eb14ab1a-0389-4a78-9cfc-4c88f3e4c083", "user": "monitor"} 2024-05-14T18:27:08.171Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "eb14ab1a-0389-4a78-9cfc-4c88f3e4c083", "secret": "some-name-mysql-init", "user": "monitor"} 2024-05-14T18:27:08.180Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "eb14ab1a-0389-4a78-9cfc-4c88f3e4c083", "user": "monitor"} 2024-05-14T18:27:08.308Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "eb14ab1a-0389-4a78-9cfc-4c88f3e4c083", "user": "xtrabackup"} 2024-05-14T18:27:08.339Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "eb14ab1a-0389-4a78-9cfc-4c88f3e4c083", "user": "xtrabackup"} 2024-05-14T18:27:08.348Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "eb14ab1a-0389-4a78-9cfc-4c88f3e4c083", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-05-14T18:27:08.358Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "eb14ab1a-0389-4a78-9cfc-4c88f3e4c083", "user": "xtrabackup"} 2024-05-14T18:27:08.388Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "eb14ab1a-0389-4a78-9cfc-4c88f3e4c083", "user": "xtrabackup"} 2024-05-14T18:27:08.403Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "eb14ab1a-0389-4a78-9cfc-4c88f3e4c083", "user": "replication"} 2024-05-14T18:27:08.434Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "eb14ab1a-0389-4a78-9cfc-4c88f3e4c083", "user": "replication"} 2024-05-14T18:27:08.443Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "eb14ab1a-0389-4a78-9cfc-4c88f3e4c083", "secret": "some-name-mysql-init", "user": "replication"} 2024-05-14T18:27:08.453Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "eb14ab1a-0389-4a78-9cfc-4c88f3e4c083", "user": "replication"} 2024-05-14T18:27:08.486Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "eb14ab1a-0389-4a78-9cfc-4c88f3e4c083", "user": "replication"} 2024-05-14T18:27:08.486Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "eb14ab1a-0389-4a78-9cfc-4c88f3e4c083", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-05-14T18:27:08.486Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "eb14ab1a-0389-4a78-9cfc-4c88f3e4c083", "last-applied-secret": 
"6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-05-14T18:28:01.338Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "21be4f00-0362-49ab-b7c0-3283014daa11", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-26053 on 10.57.176.10:53: no such host"} 2024-05-14T18:28:01.684Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "fba26827-b2ea-4b02-bcdc-d384fb48db89", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-26053 on 10.57.176.10:53: no such host"} 2024-05-14T18:29:09.143Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "a3839b55-84c1-4763-be6a-65e072844ab7", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-26053 on 10.57.176.10:53: no such host"} 2024-05-14T18:29:31.313Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "123e632b-f517-4e6f-998e-c9b5a9a21e14", "user": "monitor"} 2024-05-14T18:29:31.658Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "123e632b-f517-4e6f-998e-c9b5a9a21e14", "user": "monitor"} 2024-05-14T18:29:31.684Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "123e632b-f517-4e6f-998e-c9b5a9a21e14", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-05-14T18:29:46.997Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "806f8b72-d327-4553-9524-30893aacb9ce", "user": "monitor"} 2024-05-14T18:29:47.030Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "806f8b72-d327-4553-9524-30893aacb9ce", "user": "monitor"} 2024-05-14T18:29:47.042Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "806f8b72-d327-4553-9524-30893aacb9ce", "secret": "some-name-mysql-init", "user": "monitor"} 2024-05-14T18:29:47.050Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "806f8b72-d327-4553-9524-30893aacb9ce", "user": "monitor"} 2024-05-14T18:29:47.164Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "806f8b72-d327-4553-9524-30893aacb9ce", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"} 2024-05-14T18:30:57.224Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "a69e6730-ffb0-40de-b21f-59a7398b899f", "user": "monitor"} 2024-05-14T18:30:57.589Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "a69e6730-ffb0-40de-b21f-59a7398b899f", "user": "monitor"} 2024-05-14T18:30:57.621Z INFO Proxy pods will 
be restarted {"controller": "pxc-controller", "namespace": "users-26053", "name": "some-name", "reconcileID": "a69e6730-ffb0-40de-b21f-59a7398b899f", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:222 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:261 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:324 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1248 [mysql] 2024/05/14 18:26:30 packets.go:37: read tcp 10.12.240.49:37124->10.57.178.20:3306: i/o timeout [mysql] 2024/05/14 18:28:54 packets.go:37: unexpected EOF [mysql] 2024/05/14 18:29:09 packets.go:37: read tcp 10.12.240.49:44552->10.57.178.20:3306: i/o timeout sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2 + grep -v NAMESPACE + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-26053 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.Yhag7IG0Wd ++ mktemp + local LAST_ERR=/tmp/tmp.6CaLg35Vn0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Yhag7IG0Wd perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.6CaLg35Vn0 + rm /tmp/tmp.Yhag7IG0Wd /tmp/tmp.6CaLg35Vn0 + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.oFT5d2r7JN ++ mktemp + local LAST_ERR=/tmp/tmp.wLUvdlb6jY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.oFT5d2r7JN No resources found + cat /tmp/tmp.wLUvdlb6jY + rm /tmp/tmp.oFT5d2r7JN /tmp/tmp.wLUvdlb6jY + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.vMMbi19mpQ ++ mktemp + local LAST_ERR=/tmp/tmp.t15pkbh2Px + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.vMMbi19mpQ No resources found + cat /tmp/tmp.t15pkbh2Px + rm /tmp/tmp.vMMbi19mpQ /tmp/tmp.t15pkbh2Px + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.D5LT96PTJr ++ mktemp + local LAST_ERR=/tmp/tmp.UDojgsWmgf + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.D5LT96PTJr validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.UDojgsWmgf + rm /tmp/tmp.D5LT96PTJr /tmp/tmp.UDojgsWmgf + return 0 + kubectl_bin delete -f 
https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-26053 + rm -rf /tmp/tmp.Y95G4TXZdR + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.GXthC6OgPs + local LAST_OUT=/tmp/tmp.tLcmjsg6Cg ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.yoRZovl9B4 + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.zMyRN2ho1q + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace users-26053
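The cleanup trace above repeatedly shows the same pattern: stdout and stderr are captured to mktemp files, the kubectl call is retried up to three attempts (seq 0 2), and the captured output is printed before the temp files are removed. A rough reconstruction of that wrapper, inferred from the trace rather than taken from the test suite, looks like this (the retry delay is an assumption, since the trace never shows a failing attempt):

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0 i
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" != 0 ]; then
                sleep "$i"        # back off before retrying; exact delay is an assumption
            else
                break
            fi
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }

    # Example, matching the final cleanup step in the trace:
    # kubectl_bin delete --grace-period=0 --force=true namespace users-26053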