Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/logs/users-8-0.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + create_infra users-18742 + local ns=users-18742 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-14969 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.eS4P4sOfQV ++ mktemp + local LAST_ERR=/tmp/tmp.A5YwMOWzzH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.eS4P4sOfQV perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.A5YwMOWzzH + rm /tmp/tmp.eS4P4sOfQV /tmp/tmp.A5YwMOWzzH + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.o9PGz0JMEF ++ mktemp + local LAST_ERR=/tmp/tmp.b65Nnj0Qqk + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.o9PGz0JMEF No resources found + cat /tmp/tmp.b65Nnj0Qqk + rm /tmp/tmp.o9PGz0JMEF /tmp/tmp.b65Nnj0Qqk + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.mca01AqwJv ++ mktemp + local LAST_ERR=/tmp/tmp.E5H0siwa4P + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.mca01AqwJv No resources found + cat /tmp/tmp.E5H0siwa4P + rm /tmp/tmp.mca01AqwJv /tmp/tmp.E5H0siwa4P + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get clusterrole + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified 
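Everything in the create_infra phase above follows one pattern: strip the finalizers from any leftover PerconaXtraDBCluster objects so their deletion cannot hang on the operator, then delete all pxc, pxc-backup and pxc-restore resources across namespaces. Each call goes through the suite's kubectl_bin wrapper, which is what produces the paired LAST_OUT/LAST_ERR mktemp files echoed throughout this log. A minimal sketch of that wrapper, reconstructed from the trace (the real helper lives under e2e-tests/ and may differ in detail):

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do                   # three attempts, as in the trace
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        [ "$exit_status" -eq 0 ] && break
        sleep 0                               # the trace shows "sleep 0", i.e. no back-off
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR"
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}

The destroy_chaos_mesh teardown that closes the phase is intentionally tolerant: each "timeout 30 kubectl delete ..." receives a possibly empty name list from its grep/awk pipeline, so the repeated "error: resource(s) were provided, but no name was specified" messages are expected, and the trailing ':' no-op swallows the non-zero exit status.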
+ : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator + xargs kubectl delete ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.7oRviqEqh4 + local LAST_OUT=/tmp/tmp.xi5CP7kowB ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.vh0QpW2Xdd + local exit_status=0 + local LAST_ERR=/tmp/tmp.ThEsClTxwb + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7oRviqEqh4 + cat /tmp/tmp.vh0QpW2Xdd + rm /tmp/tmp.7oRviqEqh4 /tmp/tmp.vh0QpW2Xdd + return 0 namespace "users-14969" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xi5CP7kowB namespace "pxc-operator" deleted + cat /tmp/tmp.ThEsClTxwb + rm /tmp/tmp.xi5CP7kowB /tmp/tmp.ThEsClTxwb + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.vfYTwD5eh6 ++ mktemp + local LAST_ERR=/tmp/tmp.jYbfGUXExV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.vfYTwD5eh6 namespace/pxc-operator created + cat /tmp/tmp.jYbfGUXExV + rm /tmp/tmp.vfYTwD5eh6 /tmp/tmp.jYbfGUXExV + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.E3BS8sUiXR +++ mktemp ++ local LAST_ERR=/tmp/tmp.Z0eoiW0Q89 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.E3BS8sUiXR ++ cat /tmp/tmp.Z0eoiW0Q89 ++ rm /tmp/tmp.E3BS8sUiXR /tmp/tmp.Z0eoiW0Q89 ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1730-852bae96-1-cluster9 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.VrkrCJue5f ++ mktemp + local LAST_ERR=/tmp/tmp.3ygtsi68BG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1730-852bae96-1-cluster9 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.VrkrCJue5f Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1730-852bae96-1-cluster9" modified. 
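Two details of the namespace sweep above deserve a note. First, the egrep filter is meant to protect system namespaces, but its '^default$' anchor can never match: kubectl get ns prints NAME/STATUS/AGE columns, so the line reads "default Active ...", the filter lets it through, awk forwards "default" to xargs, and the API server responds with the Forbidden error seen above, which the suite simply tolerates. Second, wait_for_delete is a plain polling loop that re-runs kubectl get on the resource until the server answers NotFound. A sketch consistent with the trace (the retry cap and sleep interval are assumptions, not visible in this log):

wait_for_delete() {
    local res=$1
    echo -n "$res - "
    set +o xtrace
    local retry=0
    # keep polling while the resource still exists; NotFound ends the loop
    while kubectl get "$res" >/dev/null 2>&1; do
        sleep 1
        retry=$((retry + 1))
        if [ "$retry" -ge 120 ]; then         # assumed cap
            echo "timeout waiting for $res to be deleted"
            return 1
        fi
    done
}

Here it is called as wait_for_delete namespace/pxc-operator, and the closing "Error from server (NotFound)" line is the signal that the old namespace is gone and it is safe to recreate it.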
+ cat /tmp/tmp.3ygtsi68BG + rm /tmp/tmp.VrkrCJue5f /tmp/tmp.3ygtsi68BG + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.80wNEv8s7H ++ mktemp + local LAST_ERR=/tmp/tmp.WaC2kI8sLH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.80wNEv8s7H customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.WaC2kI8sLH + rm /tmp/tmp.80wNEv8s7H /tmp/tmp.WaC2kI8sLH + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.hBycKz3jkn ++ mktemp + local LAST_ERR=/tmp/tmp.labs1MlTOO + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hBycKz3jkn clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.labs1MlTOO + rm /tmp/tmp.hBycKz3jkn /tmp/tmp.labs1MlTOO + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/deploy/cw-operator.yaml + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1730-852bae96^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + kubectl_bin apply -f - ++ mktemp + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + local LAST_OUT=/tmp/tmp.l26YchGIR7 ++ mktemp + local LAST_ERR=/tmp/tmp.ImIXMrw3lD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.l26YchGIR7 deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.ImIXMrw3lD + rm /tmp/tmp.l26YchGIR7 /tmp/tmp.ImIXMrw3lD + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.nPwRPDgjA7 ++ mktemp + local LAST_ERR=/tmp/tmp.kXlollk9jr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.nPwRPDgjA7 pod/percona-xtradb-cluster-operator-784d88b77-sqbms condition met + cat /tmp/tmp.kXlollk9jr + rm /tmp/tmp.nPwRPDgjA7 /tmp/tmp.kXlollk9jr + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.rWiTLQ6wEF +++ mktemp ++ local LAST_ERR=/tmp/tmp.wy3yWZFLeT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rWiTLQ6wEF ++ cat /tmp/tmp.wy3yWZFLeT ++ rm /tmp/tmp.rWiTLQ6wEF /tmp/tmp.wy3yWZFLeT ++ return 0 + wait_pod percona-xtradb-cluster-operator-784d88b77-sqbms 480 pxc-operator + local pod=percona-xtradb-cluster-operator-784d88b77-sqbms + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-784d88b77-sqbms ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-784d88b77-sqbms condition met percona-xtradb-cluster-operator-784d88b77-sqbms.Ok + sleep 3 + create_namespace users-18742 + local namespace=users-18742 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + 
'[' -n '' ']' + desc 'cleaned up old namespaces users-18742' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-18742 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-18742 + kubectl_bin get ns ++ mktemp + local LAST_OUT=/tmp/tmp.pCBXCAELQS ++ mktemp + local LAST_ERR=/tmp/tmp.Bo5x1nb3qa + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + xargs kubectl delete ns + awk '{print$1}' ++ mktemp + local LAST_OUT=/tmp/tmp.70DuSX0TIF ++ mktemp + local LAST_ERR=/tmp/tmp.BWx5nZgy8R + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-18742 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.pCBXCAELQS + cat /tmp/tmp.Bo5x1nb3qa + rm /tmp/tmp.pCBXCAELQS /tmp/tmp.Bo5x1nb3qa + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-18742 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-18742 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.70DuSX0TIF + cat /tmp/tmp.BWx5nZgy8R Error from server (NotFound): namespaces "users-18742" not found + rm /tmp/tmp.70DuSX0TIF /tmp/tmp.BWx5nZgy8R + return 1 + : + wait_for_delete namespace/users-18742 + local res=namespace/users-18742 + echo -n 'namespace/users-18742 - ' namespace/users-18742 - + set +o xtrace Error from server (NotFound): namespaces "users-18742" not found + desc 'create namespace users-18742' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-18742 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-18742 ++ mktemp + local LAST_OUT=/tmp/tmp.iIRMtVIf6e ++ mktemp + local LAST_ERR=/tmp/tmp.abwHnvjAg9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-18742 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.iIRMtVIf6e namespace/users-18742 created + cat /tmp/tmp.abwHnvjAg9 + rm /tmp/tmp.iIRMtVIf6e /tmp/tmp.abwHnvjAg9 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.eig8447jdk +++ mktemp ++ local LAST_ERR=/tmp/tmp.6nZ2SXACrV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eig8447jdk ++ cat /tmp/tmp.6nZ2SXACrV ++ rm /tmp/tmp.eig8447jdk /tmp/tmp.6nZ2SXACrV ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1730-852bae96-1-cluster9 --namespace=users-18742 ++ mktemp + local LAST_OUT=/tmp/tmp.dzFzL8Tnij ++ mktemp + local LAST_ERR=/tmp/tmp.x7Z1PYi3GX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1730-852bae96-1-cluster9 --namespace=users-18742 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dzFzL8Tnij Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1730-852bae96-1-cluster9" modified. 
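The operator deployment above mixes two wait strategies: a direct kubectl wait --for=condition=Ready on the operator pod, and the suite's own wait_pod helper, used everywhere else. wait_pod derives which container to watch from the pod name; the sed/egrep pair in the trace maps "some-name-pxc-0" to container "pxc" and "some-name-proxysql-0" to "proxysql", while anything else (such as the operator pod) yields an empty container name. A plausible reconstruction; only the name parsing is taken verbatim from the trace, the polling body and the failure diagnostics are assumptions:

wait_pod() {
    local pod=$1
    local max_retry=${2:-480}
    local ns=$3
    local container
    container=$(echo "$pod" \
        | /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' \
        | egrep '^(pxc|proxysql)$' || true)
    set +o xtrace
    local retry=0
    # poll until the pod reports Ready, up to max_retry times
    until kubectl wait --for=condition=Ready "pod/$pod" ${ns:+--namespace "$ns"} \
            --timeout=1s >/dev/null 2>&1; do
        retry=$((retry + 1))
        if [ "$retry" -ge "$max_retry" ]; then
            echo "$pod.Failed"
            # assumption: dump recent container logs on timeout when a container is known
            [ -n "$container" ] && kubectl logs "$pod" -c "$container" | tail -20
            return 1
        fi
    done
    echo "$pod.Ok"
}

The failure path of kubectl_bin is also visible above: kubectl delete namespace users-18742 keeps failing with NotFound because the namespace never existed in this cluster, the wrapper exhausts its three attempts and returns 1, and the caller's trailing ':' accepts that, since all create_namespace needs at this point is for the namespace to be absent.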
+ cat /tmp/tmp.x7Z1PYi3GX + rm /tmp/tmp.dzFzL8Tnij /tmp/tmp.x7Z1PYi3GX + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.AIBQ2ZJCE1 ++ mktemp + local LAST_ERR=/tmp/tmp.HSvOO3uTBW + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.AIBQ2ZJCE1 secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.HSvOO3uTBW + rm /tmp/tmp.AIBQ2ZJCE1 /tmp/tmp.HSvOO3uTBW + return 0 + desc 'create PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.p5xVOsF0NZ ++ mktemp + local LAST_ERR=/tmp/tmp.w6PxHjRxeT + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.p5xVOsF0NZ secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.w6PxHjRxeT + rm /tmp/tmp.p5xVOsF0NZ /tmp/tmp.w6PxHjRxeT + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-init$#image: 
perconalab/percona-xtradb-cluster-operator:PR-1730-852bae96#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ mktemp + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-18742~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_OUT=/tmp/tmp.gJi5PKAs9p ++ mktemp + local LAST_ERR=/tmp/tmp.5WbyLTQ0Js + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.gJi5PKAs9p deployment.apps/pxc-client created + cat /tmp/tmp.5WbyLTQ0Js + rm /tmp/tmp.gJi5PKAs9p /tmp/tmp.5WbyLTQ0Js + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/some-name.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' ++ mktemp + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1730-852bae96#' + local LAST_OUT=/tmp/tmp.lSHSTLFIoE + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' ++ mktemp + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-18742~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_ERR=/tmp/tmp.cBujHEP5Xv + local exit_status=0 + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lSHSTLFIoE perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.cBujHEP5Xv + rm /tmp/tmp.lSHSTLFIoE /tmp/tmp.cBujHEP5Xv + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.nw40b7n9JK ++++ mktemp +++ local LAST_ERR=/tmp/tmp.aKO7L7gKKC +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ 
set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.nw40b7n9JK +++ cat /tmp/tmp.aKO7L7gKKC +++ rm /tmp/tmp.nw40b7n9JK /tmp/tmp.aKO7L7gKKC +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.pnbLnRHd3Q ++++ mktemp +++ local LAST_ERR=/tmp/tmp.U84EeYW3oO +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.pnbLnRHd3Q +++ cat /tmp/tmp.U84EeYW3oO +++ rm /tmp/tmp.pnbLnRHd3Q /tmp/tmp.U84EeYW3oO +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-18742 ++ mktemp + local LAST_OUT=/tmp/tmp.RNnH9U890A ++ mktemp + local LAST_ERR=/tmp/tmp.Ks3RTXmXXs + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-18742 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-18742 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-18742 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.RNnH9U890A + cat /tmp/tmp.Ks3RTXmXXs error: no matching resources found + rm /tmp/tmp.RNnH9U890A /tmp/tmp.Ks3RTXmXXs + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met some-name-pxc-0.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local 
max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.K2bgFZDNcC +++ mktemp ++ local LAST_ERR=/tmp/tmp.YBzAlTkq2P ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.K2bgFZDNcC ++ cat /tmp/tmp.YBzAlTkq2P ++ rm /tmp/tmp.K2bgFZDNcC /tmp/tmp.YBzAlTkq2P ++ return 0 + client_pod=pxc-client-6644d8898f-z827r + wait_pod pxc-client-6644d8898f-z827r + local pod=pxc-client-6644d8898f-z827r + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-z827r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-z827r condition met pxc-client-6644d8898f-z827r.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pv1of2IwVD +++ mktemp ++ local LAST_ERR=/tmp/tmp.NWS1rOvXEv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pv1of2IwVD ++ cat /tmp/tmp.NWS1rOvXEv ++ rm /tmp/tmp.pv1of2IwVD /tmp/tmp.NWS1rOvXEv ++ return 0 + client_pod=pxc-client-6644d8898f-z827r + wait_pod pxc-client-6644d8898f-z827r + local pod=pxc-client-6644d8898f-z827r + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-z827r ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-z827r condition met pxc-client-6644d8898f-z827r.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h 
some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Qd8Pu285RP +++ mktemp ++ local LAST_ERR=/tmp/tmp.ptCgqWm6f1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Qd8Pu285RP ++ cat /tmp/tmp.ptCgqWm6f1 ++ rm /tmp/tmp.Qd8Pu285RP /tmp/tmp.ptCgqWm6f1 ++ return 0 + client_pod=pxc-client-6644d8898f-z827r + wait_pod pxc-client-6644d8898f-z827r + local pod=pxc-client-6644d8898f-z827r + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-z827r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-z827r condition met pxc-client-6644d8898f-z827r.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.ShPTGmQQOH/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-1.sql /tmp/tmp.ShPTGmQQOH/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Od956w9MxU +++ mktemp ++ local LAST_ERR=/tmp/tmp.lLrJZnDDtK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Od956w9MxU ++ cat /tmp/tmp.lLrJZnDDtK ++ rm /tmp/tmp.Od956w9MxU /tmp/tmp.lLrJZnDDtK ++ return 0 + client_pod=pxc-client-6644d8898f-z827r + wait_pod pxc-client-6644d8898f-z827r + local pod=pxc-client-6644d8898f-z827r + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-z827r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-z827r condition met pxc-client-6644d8898f-z827r.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.ShPTGmQQOH/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-1.sql /tmp/tmp.ShPTGmQQOH/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JzyPrRwVOc +++ mktemp ++ local LAST_ERR=/tmp/tmp.sGdYRm6XeQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JzyPrRwVOc ++ cat /tmp/tmp.sGdYRm6XeQ ++ rm /tmp/tmp.JzyPrRwVOc /tmp/tmp.sGdYRm6XeQ ++ return 0 + client_pod=pxc-client-6644d8898f-z827r + wait_pod pxc-client-6644d8898f-z827r + local pod=pxc-client-6644d8898f-z827r + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-z827r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-z827r condition met pxc-client-6644d8898f-z827r.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.ShPTGmQQOH/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-1.sql /tmp/tmp.ShPTGmQQOH/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bFU5tqnOUW +++ mktemp ++ local LAST_ERR=/tmp/tmp.EpcHsfDN2a ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bFU5tqnOUW ++ cat /tmp/tmp.EpcHsfDN2a Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.bFU5tqnOUW /tmp/tmp.EpcHsfDN2a ++ return 0 + '[' '' ']' + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.YrO3YcPZ5J ++ mktemp + local LAST_ERR=/tmp/tmp.ke8gKjhrHA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.YrO3YcPZ5J secret/my-cluster-secrets patched + cat /tmp/tmp.ke8gKjhrHA + rm /tmp/tmp.YrO3YcPZ5J /tmp/tmp.ke8gKjhrHA + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1p1QEP14Ls +++ mktemp ++ local LAST_ERR=/tmp/tmp.aGj6dEU6Cu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1p1QEP14Ls ++ cat /tmp/tmp.aGj6dEU6Cu ++ rm /tmp/tmp.1p1QEP14Ls /tmp/tmp.aGj6dEU6Cu ++ return 0 + client_pod=pxc-client-6644d8898f-z827r + wait_pod pxc-client-6644d8898f-z827r + local pod=pxc-client-6644d8898f-z827r + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-z827r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-z827r condition met pxc-client-6644d8898f-z827r.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.ShPTGmQQOH/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.ShPTGmQQOH/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ABeV4ytId2 ++ mktemp + local LAST_ERR=/tmp/tmp.J2LV8irEmi + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ABeV4ytId2 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.J2LV8irEmi + rm /tmp/tmp.ABeV4ytId2 /tmp/tmp.J2LV8irEmi + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bJ66VymKgF +++ mktemp ++ local LAST_ERR=/tmp/tmp.pxsEKUAlP8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bJ66VymKgF ++ cat /tmp/tmp.pxsEKUAlP8 ++ rm /tmp/tmp.bJ66VymKgF /tmp/tmp.pxsEKUAlP8 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RY3XzrL5h3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.jRjYhWPNSq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RY3XzrL5h3 ++ cat /tmp/tmp.jRjYhWPNSq ++ rm /tmp/tmp.RY3XzrL5h3 /tmp/tmp.jRjYhWPNSq ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.JvvvuKaA8v ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.vuo1gNug5N +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.JvvvuKaA8v +++++ cat /tmp/tmp.vuo1gNug5N +++++ rm /tmp/tmp.JvvvuKaA8v /tmp/tmp.vuo1gNug5N +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.rYggxo6mze ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.4bkDYJraDq +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.rYggxo6mze +++++ cat /tmp/tmp.4bkDYJraDq +++++ rm /tmp/tmp.rYggxo6mze /tmp/tmp.4bkDYJraDq +++++ return 0 ++++ [[ true == 
\t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8dtYY5Q2td +++ mktemp ++ local LAST_ERR=/tmp/tmp.PmAIRKKTNA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8dtYY5Q2td ++ cat /tmp/tmp.PmAIRKKTNA ++ rm /tmp/tmp.8dtYY5Q2td /tmp/tmp.PmAIRKKTNA ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.gqZ3AueIIq ++ mktemp + local LAST_ERR=/tmp/tmp.ank7IkAZtN + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.gqZ3AueIIq secret/my-cluster-secrets patched + cat /tmp/tmp.ank7IkAZtN + rm /tmp/tmp.gqZ3AueIIq /tmp/tmp.ank7IkAZtN + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.W9vxm0C4da +++ mktemp ++ local LAST_ERR=/tmp/tmp.iFoyvqepXi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.W9vxm0C4da ++ cat /tmp/tmp.iFoyvqepXi ++ rm /tmp/tmp.W9vxm0C4da /tmp/tmp.iFoyvqepXi ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GqUXAuBKZb +++ mktemp ++ local LAST_ERR=/tmp/tmp.NIXojWz0VW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GqUXAuBKZb ++ cat /tmp/tmp.NIXojWz0VW ++ rm /tmp/tmp.GqUXAuBKZb /tmp/tmp.NIXojWz0VW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iwIvk5HHyy +++ mktemp ++ local LAST_ERR=/tmp/tmp.SM0KSe4jQy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iwIvk5HHyy ++ cat /tmp/tmp.SM0KSe4jQy ++ rm /tmp/tmp.iwIvk5HHyy /tmp/tmp.SM0KSe4jQy ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mGvhwsPspr +++ mktemp ++ 
local LAST_ERR=/tmp/tmp.unzTX80dfA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mGvhwsPspr ++ cat /tmp/tmp.unzTX80dfA ++ rm /tmp/tmp.mGvhwsPspr /tmp/tmp.unzTX80dfA ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.qBcMXjqLIw ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.C4eys8RhDU +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.qBcMXjqLIw +++++ cat /tmp/tmp.C4eys8RhDU +++++ rm /tmp/tmp.qBcMXjqLIw /tmp/tmp.C4eys8RhDU +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.YZV6Ss2FVv ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.OmKtLhX2w8 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.YZV6Ss2FVv +++++ cat /tmp/tmp.OmKtLhX2w8 +++++ rm /tmp/tmp.YZV6Ss2FVv /tmp/tmp.OmKtLhX2w8 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qSRfkhEmkf +++ mktemp ++ local LAST_ERR=/tmp/tmp.wapqnEJQGE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qSRfkhEmkf ++ cat /tmp/tmp.wapqnEJQGE ++ rm /tmp/tmp.qSRfkhEmkf /tmp/tmp.wapqnEJQGE ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.ShPTGmQQOH/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-2.sql /tmp/tmp.ShPTGmQQOH/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.ShPTGmQQOH/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-2.sql /tmp/tmp.ShPTGmQQOH/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.ShPTGmQQOH/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-2.sql /tmp/tmp.ShPTGmQQOH/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ZckO0kqr75 ++ mktemp + local LAST_ERR=/tmp/tmp.RE97GtUTQb + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ZckO0kqr75 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.RE97GtUTQb + rm /tmp/tmp.ZckO0kqr75 /tmp/tmp.RE97GtUTQb + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Zmdlo0zVQt ++ mktemp + local LAST_ERR=/tmp/tmp.QKfOWsCrno + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Zmdlo0zVQt secret/my-cluster-secrets patched + cat /tmp/tmp.QKfOWsCrno + rm /tmp/tmp.Zmdlo0zVQt /tmp/tmp.QKfOWsCrno + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Q7uZ0nkpB7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.1q631jogg1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Q7uZ0nkpB7 ++ cat /tmp/tmp.1q631jogg1 ++ rm /tmp/tmp.Q7uZ0nkpB7 /tmp/tmp.1q631jogg1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dYaMKwWN94 +++ mktemp ++ local LAST_ERR=/tmp/tmp.JjUZGhKlvV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dYaMKwWN94 ++ cat /tmp/tmp.JjUZGhKlvV ++ rm /tmp/tmp.dYaMKwWN94 /tmp/tmp.JjUZGhKlvV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DvuY93cm2z +++ mktemp ++ local LAST_ERR=/tmp/tmp.HDW4wLiaEl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 
'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DvuY93cm2z ++ cat /tmp/tmp.HDW4wLiaEl ++ rm /tmp/tmp.DvuY93cm2z /tmp/tmp.HDW4wLiaEl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yx64zvkMVY +++ mktemp ++ local LAST_ERR=/tmp/tmp.cZb8QmrCNp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yx64zvkMVY ++ cat /tmp/tmp.cZb8QmrCNp ++ rm /tmp/tmp.yx64zvkMVY /tmp/tmp.cZb8QmrCNp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3NiuBA2o7o +++ mktemp ++ local LAST_ERR=/tmp/tmp.nhTp2fPVbJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3NiuBA2o7o ++ cat /tmp/tmp.nhTp2fPVbJ ++ rm /tmp/tmp.3NiuBA2o7o /tmp/tmp.nhTp2fPVbJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kgEBgsdDmd +++ mktemp ++ local LAST_ERR=/tmp/tmp.h8mleY2iks ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kgEBgsdDmd ++ cat /tmp/tmp.h8mleY2iks ++ rm /tmp/tmp.kgEBgsdDmd /tmp/tmp.h8mleY2iks ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xVqBy9dmlV +++ mktemp ++ local LAST_ERR=/tmp/tmp.lYUHB2X7ta ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xVqBy9dmlV ++ cat /tmp/tmp.lYUHB2X7ta ++ rm /tmp/tmp.xVqBy9dmlV /tmp/tmp.lYUHB2X7ta ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.L1cIcGpVst +++ mktemp ++ local LAST_ERR=/tmp/tmp.xJol4Q6cKd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.L1cIcGpVst ++ cat /tmp/tmp.xJol4Q6cKd ++ rm /tmp/tmp.L1cIcGpVst /tmp/tmp.xJol4Q6cKd ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HBh0g16RZt +++ mktemp ++ local LAST_ERR=/tmp/tmp.AFUqrR4u3p ++ local exit_status=0 +++ seq 0 2 ++ for i in 
'$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HBh0g16RZt ++ cat /tmp/tmp.AFUqrR4u3p ++ rm /tmp/tmp.HBh0g16RZt /tmp/tmp.AFUqrR4u3p ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fghEzE1Mni +++ mktemp ++ local LAST_ERR=/tmp/tmp.ig1XOaIAaD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fghEzE1Mni ++ cat /tmp/tmp.ig1XOaIAaD ++ rm /tmp/tmp.fghEzE1Mni /tmp/tmp.ig1XOaIAaD ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.92oPD8TaAk ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.MpO3ecHwqF +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.92oPD8TaAk +++++ cat /tmp/tmp.MpO3ecHwqF +++++ rm /tmp/tmp.92oPD8TaAk /tmp/tmp.MpO3ecHwqF +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.JyRiOoRnHQ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.sNx4OnDj2N +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.JyRiOoRnHQ +++++ cat /tmp/tmp.sNx4OnDj2N +++++ rm /tmp/tmp.JyRiOoRnHQ /tmp/tmp.sNx4OnDj2N +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3aB93lqQsT +++ mktemp ++ local LAST_ERR=/tmp/tmp.Q6fwZnEEk4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3aB93lqQsT ++ cat /tmp/tmp.Q6fwZnEEk4 ++ rm /tmp/tmp.3aB93lqQsT /tmp/tmp.Q6fwZnEEk4 ++ return 0 + [[ 2 == \2 ]] + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-3-80.sql ']' + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' 
-s /tmp/tmp.ShPTGmQQOH/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-3.sql /tmp/tmp.ShPTGmQQOH/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.lVC2ttX6pu ++ mktemp + local LAST_ERR=/tmp/tmp.pwRzq7MV3u + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lVC2ttX6pu secret/my-cluster-secrets patched + cat /tmp/tmp.pwRzq7MV3u + rm /tmp/tmp.lVC2ttX6pu /tmp/tmp.pwRzq7MV3u + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.wmZ87SF0QM +++ mktemp ++ local LAST_ERR=/tmp/tmp.Hm62Zvs4fT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wmZ87SF0QM ++ cat /tmp/tmp.Hm62Zvs4fT ++ rm /tmp/tmp.wmZ87SF0QM /tmp/tmp.Hm62Zvs4fT ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + retry=0 + is_password_updated monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep additional_password + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KMP8yMPS8S +++ mktemp ++ local LAST_ERR=/tmp/tmp.OcyszqQVd3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KMP8yMPS8S ++ cat /tmp/tmp.OcyszqQVd3 ++ rm /tmp/tmp.KMP8yMPS8S /tmp/tmp.OcyszqQVd3 ++ return 0 + client_pod=pxc-client-6644d8898f-z827r + wait_pod pxc-client-6644d8898f-z827r + local pod=pxc-client-6644d8898f-z827r + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-z827r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-z827r condition met pxc-client-6644d8898f-z827r.Ok + set +o xtrace {"additional_password": "*C1F414D9BAF378B656A849B31F9F8AF3125F558B"} + retry=0 + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc 
-uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jTYmo3dA8p +++ mktemp ++ local LAST_ERR=/tmp/tmp.NcZ0ynStjv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jTYmo3dA8p ++ cat /tmp/tmp.NcZ0ynStjv ++ rm /tmp/tmp.jTYmo3dA8p /tmp/tmp.NcZ0ynStjv ++ return 0 + client_pod=pxc-client-6644d8898f-z827r + wait_pod pxc-client-6644d8898f-z827r + local pod=pxc-client-6644d8898f-z827r + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-z827r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-z827r condition met pxc-client-6644d8898f-z827r.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 1 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wgqIgmfmKo +++ mktemp ++ local LAST_ERR=/tmp/tmp.bdqQcMatNo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wgqIgmfmKo ++ cat /tmp/tmp.bdqQcMatNo ++ rm /tmp/tmp.wgqIgmfmKo /tmp/tmp.bdqQcMatNo ++ return 0 + client_pod=pxc-client-6644d8898f-z827r + wait_pod pxc-client-6644d8898f-z827r + local pod=pxc-client-6644d8898f-z827r + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-z827r ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-z827r condition met pxc-client-6644d8898f-z827r.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 2 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tnbL6G8vPL +++ mktemp ++ local LAST_ERR=/tmp/tmp.U8kifGGcbp ++ local 
exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tnbL6G8vPL ++ cat /tmp/tmp.U8kifGGcbp ++ rm /tmp/tmp.tnbL6G8vPL /tmp/tmp.U8kifGGcbp ++ return 0 + client_pod=pxc-client-6644d8898f-z827r + wait_pod pxc-client-6644d8898f-z827r + local pod=pxc-client-6644d8898f-z827r + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-z827r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-z827r condition met pxc-client-6644d8898f-z827r.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 3 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1Cb4uA3cQg +++ mktemp ++ local LAST_ERR=/tmp/tmp.t6pk5ZLYWK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1Cb4uA3cQg ++ cat /tmp/tmp.t6pk5ZLYWK ++ rm /tmp/tmp.1Cb4uA3cQg /tmp/tmp.t6pk5ZLYWK ++ return 0 + client_pod=pxc-client-6644d8898f-z827r + wait_pod pxc-client-6644d8898f-z827r + local pod=pxc-client-6644d8898f-z827r + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-z827r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-z827r condition met pxc-client-6644d8898f-z827r.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 4 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RaDoPotCbp +++ mktemp ++ local LAST_ERR=/tmp/tmp.DUu3Reu3Z5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RaDoPotCbp ++ cat /tmp/tmp.DUu3Reu3Z5 ++ rm /tmp/tmp.RaDoPotCbp /tmp/tmp.DUu3Reu3Z5 ++ return 0 + client_pod=pxc-client-6644d8898f-z827r + wait_pod pxc-client-6644d8898f-z827r + local pod=pxc-client-6644d8898f-z827r + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-z827r ++ /usr/bin/sed -E 
's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-z827r condition met pxc-client-6644d8898f-z827r.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 5 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.s6XoFkFVr7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.R9MsFGExKn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.s6XoFkFVr7 ++ cat /tmp/tmp.R9MsFGExKn ++ rm /tmp/tmp.s6XoFkFVr7 /tmp/tmp.R9MsFGExKn ++ return 0 + client_pod=pxc-client-6644d8898f-z827r + wait_pod pxc-client-6644d8898f-z827r + local pod=pxc-client-6644d8898f-z827r + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-z827r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-z827r condition met pxc-client-6644d8898f-z827r.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 6 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YZsVMz6WSj +++ mktemp ++ local LAST_ERR=/tmp/tmp.NrGoXRD5jN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YZsVMz6WSj ++ cat /tmp/tmp.NrGoXRD5jN ++ rm /tmp/tmp.YZsVMz6WSj /tmp/tmp.NrGoXRD5jN ++ return 0 + client_pod=pxc-client-6644d8898f-z827r + wait_pod pxc-client-6644d8898f-z827r + local pod=pxc-client-6644d8898f-z827r + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-z827r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-z827r condition met pxc-client-6644d8898f-z827r.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 7 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE 
user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aIbImWHArO +++ mktemp ++ local LAST_ERR=/tmp/tmp.o6E8FcMNhp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aIbImWHArO ++ cat /tmp/tmp.o6E8FcMNhp ++ rm /tmp/tmp.aIbImWHArO /tmp/tmp.o6E8FcMNhp ++ return 0 + client_pod=pxc-client-6644d8898f-z827r + wait_pod pxc-client-6644d8898f-z827r + local pod=pxc-client-6644d8898f-z827r + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-z827r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-z827r condition met pxc-client-6644d8898f-z827r.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 8 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hAvW3eAZVu +++ mktemp ++ local LAST_ERR=/tmp/tmp.u1D3fKXGOE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hAvW3eAZVu ++ cat /tmp/tmp.u1D3fKXGOE ++ rm /tmp/tmp.hAvW3eAZVu /tmp/tmp.u1D3fKXGOE ++ return 0 + client_pod=pxc-client-6644d8898f-z827r + wait_pod pxc-client-6644d8898f-z827r + local pod=pxc-client-6644d8898f-z827r + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-z827r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-z827r condition met pxc-client-6644d8898f-z827r.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 9 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZzJ7RLE6tu +++ mktemp ++ local LAST_ERR=/tmp/tmp.Xlmw5K9sTW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods 
--selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZzJ7RLE6tu ++ cat /tmp/tmp.Xlmw5K9sTW ++ rm /tmp/tmp.ZzJ7RLE6tu /tmp/tmp.Xlmw5K9sTW ++ return 0 + client_pod=pxc-client-6644d8898f-z827r + wait_pod pxc-client-6644d8898f-z827r + local pod=pxc-client-6644d8898f-z827r + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-z827r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-z827r condition met pxc-client-6644d8898f-z827r.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 10 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Fw1siFO9GJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.EFcAAYL8kK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Fw1siFO9GJ ++ cat /tmp/tmp.EFcAAYL8kK ++ rm /tmp/tmp.Fw1siFO9GJ /tmp/tmp.EFcAAYL8kK ++ return 0 + client_pod=pxc-client-6644d8898f-z827r + wait_pod pxc-client-6644d8898f-z827r + local pod=pxc-client-6644d8898f-z827r + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-z827r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-z827r condition met pxc-client-6644d8898f-z827r.Ok + set +o xtrace NULL + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Y7Aj7G683n +++ mktemp ++ local LAST_ERR=/tmp/tmp.wehh72b8DU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Y7Aj7G683n ++ cat /tmp/tmp.wehh72b8DU ++ rm /tmp/tmp.Y7Aj7G683n /tmp/tmp.wehh72b8DU ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Zn1jaH1dgP +++ mktemp ++ local LAST_ERR=/tmp/tmp.1wlWmLVQu2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Zn1jaH1dgP ++ cat /tmp/tmp.1wlWmLVQu2 ++ rm /tmp/tmp.Zn1jaH1dgP /tmp/tmp.1wlWmLVQu2 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name 
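
The propagation wait traced above relies on MySQL 8.0's dual-password feature: while the old credential is still accepted, mysql.user's User_attributes column carries {"additional_password": "<old hash>"}, and it reverts to NULL once the operator discards the old password. A minimal standalone sketch of that loop, condensing what is_old_password_discarded/run_mysql do here (names and credentials taken from this run; a mysql client on PATH is assumed, and this is not the test suite's helper itself):

host=some-name-pxc
user=monitor
root_pass=test-password
for retry in $(seq 1 240); do
    # NULL means the old password was discarded; a JSON value means the
    # additional (old) password is still active alongside the new one.
    attrs=$(mysql -h "$host" -uroot -p"$root_pass" -NBe \
        "SELECT User_attributes FROM mysql.user WHERE user='$user';")
    [ "$attrs" = "NULL" ] && break
    echo 'waiting for password propagation'
    sleep 1
done
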
+++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.obsU21WM2H ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.NAfVC4TCfz +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.obsU21WM2H +++++ cat /tmp/tmp.NAfVC4TCfz +++++ rm /tmp/tmp.obsU21WM2H /tmp/tmp.NAfVC4TCfz +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ZmOkjP6maA ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ufIN8aLvbt +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ZmOkjP6maA +++++ cat /tmp/tmp.ufIN8aLvbt +++++ rm /tmp/tmp.ZmOkjP6maA /tmp/tmp.ufIN8aLvbt +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Had73KYlDJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.69WHdmEsBh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Had73KYlDJ ++ cat /tmp/tmp.69WHdmEsBh ++ rm /tmp/tmp.Had73KYlDJ /tmp/tmp.69WHdmEsBh ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.y7ghjbEaSm +++ mktemp ++ local LAST_ERR=/tmp/tmp.XSXxcQORpp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.y7ghjbEaSm ++ cat /tmp/tmp.XSXxcQORpp ++ rm /tmp/tmp.y7ghjbEaSm /tmp/tmp.XSXxcQORpp ++ return 0 + client_pod=pxc-client-6644d8898f-z827r + wait_pod pxc-client-6644d8898f-z827r + local pod=pxc-client-6644d8898f-z827r + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-z827r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-z827r condition met pxc-client-6644d8898f-z827r.Ok + set 
+o xtrace + '[' '!' -s /tmp/tmp.ShPTGmQQOH/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.ShPTGmQQOH/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.7FQctHjF75 ++ mktemp + local LAST_ERR=/tmp/tmp.Eu4PJ9MlM9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7FQctHjF75 secret/my-cluster-secrets patched + cat /tmp/tmp.Eu4PJ9MlM9 + rm /tmp/tmp.7FQctHjF75 /tmp/tmp.Eu4PJ9MlM9 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BqcvKD33bG +++ mktemp ++ local LAST_ERR=/tmp/tmp.RmtWBEh7Sd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BqcvKD33bG ++ cat /tmp/tmp.RmtWBEh7Sd ++ rm /tmp/tmp.BqcvKD33bG /tmp/tmp.RmtWBEh7Sd ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PDmeF1sNc0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.1f8uFLgL8J ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PDmeF1sNc0 ++ cat /tmp/tmp.1f8uFLgL8J ++ rm /tmp/tmp.PDmeF1sNc0 /tmp/tmp.1f8uFLgL8J ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QNpN3cTVhT +++ mktemp ++ local LAST_ERR=/tmp/tmp.dBnC27Z2Tv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QNpN3cTVhT ++ cat /tmp/tmp.dBnC27Z2Tv ++ rm /tmp/tmp.QNpN3cTVhT /tmp/tmp.dBnC27Z2Tv ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.dNtDZXf1a1 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.IyOB3KjOVV +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 
'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.dNtDZXf1a1 +++++ cat /tmp/tmp.IyOB3KjOVV +++++ rm /tmp/tmp.dNtDZXf1a1 /tmp/tmp.IyOB3KjOVV +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.BjXYwsLM5F ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.0wxqDlggtN +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.BjXYwsLM5F +++++ cat /tmp/tmp.0wxqDlggtN +++++ rm /tmp/tmp.BjXYwsLM5F /tmp/tmp.0wxqDlggtN +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bl50okPTiP +++ mktemp ++ local LAST_ERR=/tmp/tmp.sp7lFZNzuv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bl50okPTiP ++ cat /tmp/tmp.sp7lFZNzuv ++ rm /tmp/tmp.bl50okPTiP /tmp/tmp.sp7lFZNzuv ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TSyWmFfiMM +++ mktemp ++ local LAST_ERR=/tmp/tmp.fu9jGHj8PT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TSyWmFfiMM ++ cat /tmp/tmp.fu9jGHj8PT ++ rm /tmp/tmp.TSyWmFfiMM /tmp/tmp.fu9jGHj8PT ++ return 0 + client_pod=pxc-client-6644d8898f-z827r + wait_pod pxc-client-6644d8898f-z827r + local pod=pxc-client-6644d8898f-z827r + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-z827r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-z827r condition met pxc-client-6644d8898f-z827r.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.ShPTGmQQOH/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.ShPTGmQQOH/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Za7cR9eTTx ++ mktemp + local LAST_ERR=/tmp/tmp.HiT1T5REeq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Za7cR9eTTx perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.HiT1T5REeq + rm /tmp/tmp.Za7cR9eTTx /tmp/tmp.HiT1T5REeq + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eQPS1t947n +++ mktemp ++ local LAST_ERR=/tmp/tmp.Mf526u8rcM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eQPS1t947n ++ cat /tmp/tmp.Mf526u8rcM ++ rm /tmp/tmp.eQPS1t947n /tmp/tmp.Mf526u8rcM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QUuhETC8XG +++ mktemp ++ local LAST_ERR=/tmp/tmp.zh34ucSTUX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QUuhETC8XG ++ cat /tmp/tmp.zh34ucSTUX ++ rm /tmp/tmp.QUuhETC8XG /tmp/tmp.zh34ucSTUX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tVfWOmTJeL +++ mktemp ++ local LAST_ERR=/tmp/tmp.VmhgbHXZJG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tVfWOmTJeL ++ cat /tmp/tmp.VmhgbHXZJG ++ rm /tmp/tmp.tVfWOmTJeL /tmp/tmp.VmhgbHXZJG ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YwB6DtUvFK +++ mktemp ++ local LAST_ERR=/tmp/tmp.f9Ds15lWbt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ 
break ++ cat /tmp/tmp.YwB6DtUvFK ++ cat /tmp/tmp.f9Ds15lWbt ++ rm /tmp/tmp.YwB6DtUvFK /tmp/tmp.f9Ds15lWbt ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.x5MFhykbSF +++ mktemp ++ local LAST_ERR=/tmp/tmp.IV1kxwLMIM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.x5MFhykbSF ++ cat /tmp/tmp.IV1kxwLMIM ++ rm /tmp/tmp.x5MFhykbSF /tmp/tmp.IV1kxwLMIM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vfmmhGQ2Ty +++ mktemp ++ local LAST_ERR=/tmp/tmp.3qMyl1UmnK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vfmmhGQ2Ty ++ cat /tmp/tmp.3qMyl1UmnK ++ rm /tmp/tmp.vfmmhGQ2Ty /tmp/tmp.3qMyl1UmnK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.isrETNZBrz +++ mktemp ++ local LAST_ERR=/tmp/tmp.yEm0h5v1kW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.isrETNZBrz ++ cat /tmp/tmp.yEm0h5v1kW ++ rm /tmp/tmp.isrETNZBrz /tmp/tmp.yEm0h5v1kW ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.k5RY2UQGX3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ycJQEPyoWQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.k5RY2UQGX3 ++ cat /tmp/tmp.ycJQEPyoWQ ++ rm /tmp/tmp.k5RY2UQGX3 /tmp/tmp.ycJQEPyoWQ ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.6YxTETfQ8e ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.gTCrWzdiDq +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.6YxTETfQ8e +++++ cat /tmp/tmp.gTCrWzdiDq +++++ rm /tmp/tmp.6YxTETfQ8e /tmp/tmp.gTCrWzdiDq +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.jjq83xeIRS ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.BPxG2HdARa +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.jjq83xeIRS +++++ 
cat /tmp/tmp.BPxG2HdARa +++++ rm /tmp/tmp.jjq83xeIRS /tmp/tmp.BPxG2HdARa +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.H9MkkHxiQI +++ mktemp ++ local LAST_ERR=/tmp/tmp.cVvZuZXSPc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.H9MkkHxiQI ++ cat /tmp/tmp.cVvZuZXSPc ++ rm /tmp/tmp.H9MkkHxiQI /tmp/tmp.cVvZuZXSPc ++ return 0 + [[ 2 == \2 ]] + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.xseTuhXyJL ++ mktemp + local LAST_ERR=/tmp/tmp.ffwOGBW6bd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xseTuhXyJL secret/my-cluster-secrets-2 patched + cat /tmp/tmp.ffwOGBW6bd + rm /tmp/tmp.xseTuhXyJL /tmp/tmp.ffwOGBW6bd + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ScVrv5w6e3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.XVg4S5DuWT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ScVrv5w6e3 ++ cat /tmp/tmp.XVg4S5DuWT ++ rm /tmp/tmp.ScVrv5w6e3 /tmp/tmp.XVg4S5DuWT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FkEDNGApK9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.gkIquQVLGN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FkEDNGApK9 ++ cat /tmp/tmp.gkIquQVLGN ++ rm /tmp/tmp.FkEDNGApK9 /tmp/tmp.gkIquQVLGN ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KaQJASVtSo +++ mktemp ++ local LAST_ERR=/tmp/tmp.G18CthwaaT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' 
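
The rotation traced above is just a Secret patch: the new value is base64-encoded (Secret data is always base64) and written under the user's key, after which the operator drives the cluster through initializing back to ready. A minimal sketch of the same patch using the names from this run (echo -n avoids encoding a trailing newline; this condenses what patch_secret does rather than reproducing it):

newpass=test-password2
newpassencrypted=$(echo -n "$newpass" | base64)
kubectl patch secret my-cluster-secrets-2 \
    -p="{\"data\":{\"operator\": \"$newpassencrypted\"}}"
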
++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KaQJASVtSo ++ cat /tmp/tmp.G18CthwaaT ++ rm /tmp/tmp.KaQJASVtSo /tmp/tmp.G18CthwaaT ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.pT5sBA76OS ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.FKikbpnenW +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.pT5sBA76OS +++++ cat /tmp/tmp.FKikbpnenW +++++ rm /tmp/tmp.pT5sBA76OS /tmp/tmp.FKikbpnenW +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.gvlsJLHIeJ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.MnIKrw2DZM +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.gvlsJLHIeJ +++++ cat /tmp/tmp.MnIKrw2DZM +++++ rm /tmp/tmp.gvlsJLHIeJ /tmp/tmp.MnIKrw2DZM +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bPY467rXC4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.XWR2obNw9m ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bPY467rXC4 ++ cat /tmp/tmp.XWR2obNw9m ++ rm /tmp/tmp.bPY467rXC4 /tmp/tmp.XWR2obNw9m ++ return 0 + [[ 2 == \2 ]] + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.F6ZKAx0L2I +++ mktemp ++ local LAST_ERR=/tmp/tmp.qDubXqyTxB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.F6ZKAx0L2I ++ cat /tmp/tmp.qDubXqyTxB ++ rm /tmp/tmp.F6ZKAx0L2I /tmp/tmp.qDubXqyTxB ++ return 0 + client_pod=pxc-client-6644d8898f-z827r + wait_pod pxc-client-6644d8898f-z827r + local pod=pxc-client-6644d8898f-z827r + local max_retry=480 + local ns= ++ echo 
pxc-client-6644d8898f-z827r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-z827r condition met pxc-client-6644d8898f-z827r.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.ShPTGmQQOH/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.ShPTGmQQOH/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.DFA4wf6tAF +++ mktemp ++ local LAST_ERR=/tmp/tmp.jdftxEqDqW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DFA4wf6tAF ++ cat /tmp/tmp.jdftxEqDqW ++ rm /tmp/tmp.DFA4wf6tAF /tmp/tmp.jdftxEqDqW ++ return 0 + newpass=')]*wccFDo>[QIpX{s%)' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\'')]*wccFDo>[QIpX{s%)'\'';' '-h some-name-pxc -uroot -p'\'')]*wccFDo>[QIpX{s%)'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\'')]*wccFDo>[QIpX{s%)'\'';' + local 'uri=-h some-name-pxc -uroot -p'\'')]*wccFDo>[QIpX{s%)'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VxOxfURwjq +++ mktemp ++ local LAST_ERR=/tmp/tmp.pRJMezgqV9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VxOxfURwjq ++ cat /tmp/tmp.pRJMezgqV9 ++ rm /tmp/tmp.VxOxfURwjq /tmp/tmp.pRJMezgqV9 ++ return 0 + client_pod=pxc-client-6644d8898f-z827r + wait_pod pxc-client-6644d8898f-z827r + local pod=pxc-client-6644d8898f-z827r + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-z827r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-z827r condition met pxc-client-6644d8898f-z827r.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\'')]*wccFDo>[QIpX{s%)'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\'')]*wccFDo>[QIpX{s%)'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\'')]*wccFDo>[QIpX{s%)'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\'')]*wccFDo>[QIpX{s%)'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local 
LAST_OUT=/tmp/tmp.9BakpV1LFy
+++ mktemp
++ local LAST_ERR=/tmp/tmp.LMQy3LKWvQ
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.9BakpV1LFy
++ cat /tmp/tmp.LMQy3LKWvQ
++ rm /tmp/tmp.9BakpV1LFy /tmp/tmp.LMQy3LKWvQ
++ return 0
+ client_pod=pxc-client-6644d8898f-z827r
+ wait_pod pxc-client-6644d8898f-z827r
+ local pod=pxc-client-6644d8898f-z827r
+ local max_retry=480
+ local ns=
++ echo pxc-client-6644d8898f-z827r
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-6644d8898f-z827r condition met
pxc-client-6644d8898f-z827r.Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.ShPTGmQQOH/select-4.sql ']'
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.ShPTGmQQOH/select-4.sql
++ getSecretData internal-some-name operator
++ local secretName=internal-some-name
++ local dataKey=operator
++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.DV08AYYu8q
+++ mktemp
++ local LAST_ERR=/tmp/tmp.GFed9ohLbl
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/internal-some-name '--template={{.data.operator}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.DV08AYYu8q
++ cat /tmp/tmp.GFed9ohLbl
++ rm /tmp/tmp.DV08AYYu8q /tmp/tmp.GFed9ohLbl
++ return 0
+ pass=test-password2
+ desc 'check secret without operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
check secret without operator
-----------------------------------------------------------------------------------
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/conf/secrets.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.sFXwh9erZp
++ mktemp
+ local LAST_ERR=/tmp/tmp.ZweUIVjHjK
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/conf/secrets.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.sFXwh9erZp
secret/my-cluster-secrets-2 configured
+ cat /tmp/tmp.ZweUIVjHjK
Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
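
getSecretData above is the read-back counterpart of the patches: extract the key with a go-template and base64-decode it. The warning in the trace is expected, since my-cluster-secrets-2 evidently was not created with kubectl apply or kubectl create --save-config and so lacks the last-applied-configuration annotation kubectl uses for its three-way merge; kubectl patches it in on the first apply. A sketch of the read-back with names from this run:

# Decode one key of a Secret; prints test-password2 in this run.
kubectl get secret internal-some-name \
    --template='{{.data.operator}}' | base64 --decode
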
+ rm /tmp/tmp.sFXwh9erZp /tmp/tmp.ZweUIVjHjK + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8UHrlq6VUw +++ mktemp ++ local LAST_ERR=/tmp/tmp.8oPCyA2Ybf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8UHrlq6VUw ++ cat /tmp/tmp.8oPCyA2Ybf ++ rm /tmp/tmp.8UHrlq6VUw /tmp/tmp.8oPCyA2Ybf ++ return 0 + client_pod=pxc-client-6644d8898f-z827r + wait_pod pxc-client-6644d8898f-z827r + local pod=pxc-client-6644d8898f-z827r + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-z827r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-z827r condition met pxc-client-6644d8898f-z827r.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.ShPTGmQQOH/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.ShPTGmQQOH/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1730-852bae96#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + local LAST_OUT=/tmp/tmp.4pwFU1QjL4 + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' ++ mktemp + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + local LAST_ERR=/tmp/tmp.6fUeBOzQ0H + local exit_status=0 + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-18742~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.4pwFU1QjL4 perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.6fUeBOzQ0H + rm /tmp/tmp.4pwFU1QjL4 /tmp/tmp.6fUeBOzQ0H + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aERscgCTLc +++ mktemp ++ local LAST_ERR=/tmp/tmp.cglFSIsbKP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aERscgCTLc ++ cat /tmp/tmp.cglFSIsbKP ++ rm /tmp/tmp.aERscgCTLc /tmp/tmp.cglFSIsbKP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5SbIUFo6YI +++ mktemp ++ local LAST_ERR=/tmp/tmp.SpT9tPTUUD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5SbIUFo6YI ++ cat /tmp/tmp.SpT9tPTUUD ++ rm /tmp/tmp.5SbIUFo6YI /tmp/tmp.SpT9tPTUUD ++ return 0 + [[ 
initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Lgl2Jxr56m +++ mktemp ++ local LAST_ERR=/tmp/tmp.OTNPzIcom0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Lgl2Jxr56m ++ cat /tmp/tmp.OTNPzIcom0 ++ rm /tmp/tmp.Lgl2Jxr56m /tmp/tmp.OTNPzIcom0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DNn7b6qsd2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.61f1AeF8Y1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DNn7b6qsd2 ++ cat /tmp/tmp.61f1AeF8Y1 ++ rm /tmp/tmp.DNn7b6qsd2 /tmp/tmp.61f1AeF8Y1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EWzMiekznW +++ mktemp ++ local LAST_ERR=/tmp/tmp.NqJr0nG4KF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EWzMiekznW ++ cat /tmp/tmp.NqJr0nG4KF ++ rm /tmp/tmp.EWzMiekznW /tmp/tmp.NqJr0nG4KF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XrAOz2RegI +++ mktemp ++ local LAST_ERR=/tmp/tmp.olHO3qJcSp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XrAOz2RegI ++ cat /tmp/tmp.olHO3qJcSp ++ rm /tmp/tmp.XrAOz2RegI /tmp/tmp.olHO3qJcSp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VGMNMtIXc5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.llpclzZWIt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VGMNMtIXc5 ++ cat /tmp/tmp.llpclzZWIt ++ rm /tmp/tmp.VGMNMtIXc5 /tmp/tmp.llpclzZWIt ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OV2qoMXa8t +++ mktemp ++ local LAST_ERR=/tmp/tmp.Twq2YDoNcA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OV2qoMXa8t ++ cat /tmp/tmp.Twq2YDoNcA ++ rm 
/tmp/tmp.OV2qoMXa8t /tmp/tmp.Twq2YDoNcA ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jRzYfN0omo +++ mktemp ++ local LAST_ERR=/tmp/tmp.wxeVBdKHKS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jRzYfN0omo ++ cat /tmp/tmp.wxeVBdKHKS ++ rm /tmp/tmp.jRzYfN0omo /tmp/tmp.wxeVBdKHKS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 8 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9ZVytKNxvl +++ mktemp ++ local LAST_ERR=/tmp/tmp.kMCrfIBHDJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9ZVytKNxvl ++ cat /tmp/tmp.kMCrfIBHDJ ++ rm /tmp/tmp.9ZVytKNxvl /tmp/tmp.kMCrfIBHDJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 9 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qTFL13C9u4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.cpgJ7QHKvz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qTFL13C9u4 ++ cat /tmp/tmp.cpgJ7QHKvz ++ rm /tmp/tmp.qTFL13C9u4 /tmp/tmp.cpgJ7QHKvz ++ return 0 + [[ error == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 10 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sGv49FAjPj +++ mktemp ++ local LAST_ERR=/tmp/tmp.dlbef7XlgX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sGv49FAjPj ++ cat /tmp/tmp.dlbef7XlgX ++ rm /tmp/tmp.sGv49FAjPj /tmp/tmp.dlbef7XlgX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 11 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TzC6Y9XqGt +++ mktemp ++ local LAST_ERR=/tmp/tmp.iBg6ZjfrFL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TzC6Y9XqGt ++ cat /tmp/tmp.iBg6ZjfrFL ++ rm /tmp/tmp.TzC6Y9XqGt /tmp/tmp.iBg6ZjfrFL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 12 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0k7E0ebNRH +++ mktemp ++ local LAST_ERR=/tmp/tmp.G7w8qKbgPF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat 
/tmp/tmp.0k7E0ebNRH ++ cat /tmp/tmp.G7w8qKbgPF ++ rm /tmp/tmp.0k7E0ebNRH /tmp/tmp.G7w8qKbgPF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 13 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZXqjfTILHD +++ mktemp ++ local LAST_ERR=/tmp/tmp.9OeknYRjND ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZXqjfTILHD ++ cat /tmp/tmp.9OeknYRjND ++ rm /tmp/tmp.ZXqjfTILHD /tmp/tmp.9OeknYRjND ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nuSFTBbaZE +++ mktemp ++ local LAST_ERR=/tmp/tmp.hr0I5GBLWo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nuSFTBbaZE ++ cat /tmp/tmp.hr0I5GBLWo ++ rm /tmp/tmp.nuSFTBbaZE /tmp/tmp.hr0I5GBLWo ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.c06mrvhUBr ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.mv7dVob3PJ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.c06mrvhUBr +++++ cat /tmp/tmp.mv7dVob3PJ +++++ rm /tmp/tmp.c06mrvhUBr /tmp/tmp.mv7dVob3PJ +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MOl9avHnSA +++ mktemp ++ local LAST_ERR=/tmp/tmp.Bhipv51vBL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MOl9avHnSA ++ cat /tmp/tmp.Bhipv51vBL ++ rm /tmp/tmp.MOl9avHnSA /tmp/tmp.Bhipv51vBL ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.fpKGm2fYEC ++ mktemp + local LAST_ERR=/tmp/tmp.RmfajvfGWP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.fpKGm2fYEC secret/my-cluster-secrets patched + cat /tmp/tmp.RmfajvfGWP + rm /tmp/tmp.fpKGm2fYEC /tmp/tmp.RmfajvfGWP + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency 
----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vQrA8DFInE +++ mktemp ++ local LAST_ERR=/tmp/tmp.pwoimBtc3q ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vQrA8DFInE ++ cat /tmp/tmp.pwoimBtc3q ++ rm /tmp/tmp.vQrA8DFInE /tmp/tmp.pwoimBtc3q ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.O0kQlBwghG +++ mktemp ++ local LAST_ERR=/tmp/tmp.LRyrOvdcxz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.O0kQlBwghG ++ cat /tmp/tmp.LRyrOvdcxz ++ rm /tmp/tmp.O0kQlBwghG /tmp/tmp.LRyrOvdcxz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cqPxO0CRNL +++ mktemp ++ local LAST_ERR=/tmp/tmp.kx3gEswHo6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cqPxO0CRNL ++ cat /tmp/tmp.kx3gEswHo6 ++ rm /tmp/tmp.cqPxO0CRNL /tmp/tmp.kx3gEswHo6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.D8lzVVn0or +++ mktemp ++ local LAST_ERR=/tmp/tmp.Hd3ciaIN6B ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.D8lzVVn0or ++ cat /tmp/tmp.Hd3ciaIN6B ++ rm /tmp/tmp.D8lzVVn0or /tmp/tmp.Hd3ciaIN6B ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bvJXnkfSwY +++ mktemp ++ local LAST_ERR=/tmp/tmp.RHxdGV820u ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bvJXnkfSwY ++ cat /tmp/tmp.RHxdGV820u ++ rm /tmp/tmp.bvJXnkfSwY /tmp/tmp.RHxdGV820u ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.liAgFKIEut ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.riBVwCU4f2 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.liAgFKIEut +++++ cat /tmp/tmp.riBVwCU4f2 +++++ rm /tmp/tmp.liAgFKIEut /tmp/tmp.riBVwCU4f2 +++++ return 0 ++++ [[ 
true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6qX8DGTtoC +++ mktemp ++ local LAST_ERR=/tmp/tmp.6h1GqMa5bp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6qX8DGTtoC ++ cat /tmp/tmp.6h1GqMa5bp ++ rm /tmp/tmp.6qX8DGTtoC /tmp/tmp.6h1GqMa5bp ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-3-80.sql ']' + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.l6C5fCOSSp +++ mktemp ++ local LAST_ERR=/tmp/tmp.NN5clsw2ur ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.l6C5fCOSSp ++ cat /tmp/tmp.NN5clsw2ur ++ rm /tmp/tmp.l6C5fCOSSp /tmp/tmp.NN5clsw2ur ++ return 0 + client_pod=pxc-client-6644d8898f-z827r + wait_pod pxc-client-6644d8898f-z827r + local pod=pxc-client-6644d8898f-z827r + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-z827r ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-z827r condition met pxc-client-6644d8898f-z827r.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.ShPTGmQQOH/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-3.sql /tmp/tmp.ShPTGmQQOH/select-3.sql + destroy users-18742 + local namespace=users-18742 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v 'the object has been modified' + grep -v level=info ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + sort -u + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + grep -v 'get backup status: Job.batch' + tee /tmp/tmp.ShPTGmQQOH/operator.log +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.UBXqH88hxk +++ mktemp ++ local LAST_ERR=/tmp/tmp.HILvRWuwEi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UBXqH88hxk ++ cat /tmp/tmp.HILvRWuwEi ++ rm /tmp/tmp.UBXqH88hxk /tmp/tmp.HILvRWuwEi ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-784d88b77-sqbms ++ mktemp + local LAST_OUT=/tmp/tmp.eiuor9zdp7 ++ mktemp + local LAST_ERR=/tmp/tmp.5EknPpKbrp + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-784d88b77-sqbms + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.eiuor9zdp7 + cat /tmp/tmp.5EknPpKbrp + rm /tmp/tmp.eiuor9zdp7 /tmp/tmp.5EknPpKbrp + return 0 2024-06-13T04:03:51.892Z INFO setup Manager starting up {"gitCommit": "852bae96aa0d82139a70b7c3d88a51521401373a", "gitBranch": "PR-1730-852bae96", "buildTime": "2024-06-13T02:01:34Z", "goVersion": "go1.22.4", "os": "linux", "arch": "amd64"} 2024-06-13T04:03:51.892Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1390000"} 2024-06-13T04:03:51.893Z INFO setup Registering Components. 2024-06-13T04:03:56.579Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2024-06-13T04:03:56.582Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2024-06-13T04:03:56.582Z INFO controller-runtime.metrics Starting metrics server 2024-06-13T04:03:56.582Z INFO setup Starting the Cmd. 2024-06-13T04:03:56.583Z INFO controller-runtime.certwatcher Starting certificate watcher 2024-06-13T04:03:56.583Z INFO controller-runtime.certwatcher Updated current TLS certificate 2024-06-13T04:03:56.583Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2024-06-13T04:03:56.583Z INFO controller-runtime.webhook Starting webhook server 2024-06-13T04:03:56.583Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2024-06-13T04:03:56.683Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 
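The destroy step that begins here collects the operator log and normalizes it before saving: known-noisy lines are dropped, timestamps are stripped, and the result is de-duplicated so runs compare cleanly. The pipeline, assembled from the trace (the pod name and tmp directory are specific to this run; in the trace itself the pod name is resolved via the app.kubernetes.io/name=percona-xtradb-cluster-operator selector rather than hardcoded):

kubectl logs -n pxc-operator percona-xtradb-cluster-operator-784d88b77-sqbms \
    | grep -v 'get backup status: Job.batch' \
    | grep -v 'the object has been modified' \
    | grep -v level=info \
    | /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \
    | sort -u \
    | tee /tmp/tmp.ShPTGmQQOH/operator.log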
2024-06-13T04:03:56.721Z DEBUG events percona-xtradb-cluster-operator-784d88b77-sqbms_57e5cfd5-4487-42f0-92f3-82e66cf0d8f4 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"2d54514f-c215-40c1-b5c5-966e60e9c6b4","apiVersion":"coordination.k8s.io/v1","resourceVersion":"65238"}, "reason": "LeaderElection"} 2024-06-13T04:03:56.721Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2024-06-13T04:03:56.722Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2024-06-13T04:03:56.722Z INFO Starting Controller {"controller": "pxc-controller"} 2024-06-13T04:03:56.722Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2024-06-13T04:03:56.722Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: unknown type"} 2024-06-13T04:03:56.722Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: unknown type"} 2024-06-13T04:03:56.722Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: unknown type"} 2024-06-13T04:03:56.829Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2024-06-13T04:03:56.829Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2024-06-13T04:03:56.830Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2024-06-13T04:04:27.050Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "9b79983e-0d1b-4e73-b13e-6e0d7be778cb", "version": "1.15.0"} 2024-06-13T04:05:42.258Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "ff12891b-91f3-47a4-a946-b62a944cf605", "user": "operator"} 2024-06-13T04:05:42.297Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "ff12891b-91f3-47a4-a946-b62a944cf605", "user": "monitor"} 2024-06-13T04:05:42.400Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "ff12891b-91f3-47a4-a946-b62a944cf605"} 2024-06-13T04:05:42.437Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "ff12891b-91f3-47a4-a946-b62a944cf605"} 2024-06-13T04:05:42.480Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "ff12891b-91f3-47a4-a946-b62a944cf605", "user": "xtrabackup"} 2024-06-13T04:05:42.536Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "ff12891b-91f3-47a4-a946-b62a944cf605"} 2024-06-13T04:05:42.585Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "ff12891b-91f3-47a4-a946-b62a944cf605", "user": "replication"} 2024-06-13T04:05:42.706Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "ff12891b-91f3-47a4-a946-b62a944cf605", "err": "get primary pxc pod: not found"} 2024-06-13T04:05:42.912Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "6b4960c8-7553-4a76-93b6-7cb3fa64e49b", 
"err": "get primary pxc pod: not found"} 2024-06-13T04:05:47.325Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "ebc2e52d-86fa-4d0f-ae4f-da3d2bc4d7e8", "err": "get primary pxc pod: not found"} 2024-06-13T04:05:52.540Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "40ec7258-5529-496a-be83-651a8a1e4268", "err": "get primary pxc pod: not found"} 2024-06-13T04:05:57.857Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "7b2b2ca4-c98d-409d-974e-2733b4262ca8", "err": "get primary pxc pod: not found"} 2024-06-13T04:08:12.016Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "cc25fc77-8356-4b6d-8239-2bff8834e18e", "user": "root"} 2024-06-13T04:08:12.292Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "cc25fc77-8356-4b6d-8239-2bff8834e18e", "new version": "8.0.36-28.1"} 2024-06-13T04:08:15.446Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "cc25fc77-8356-4b6d-8239-2bff8834e18e"} 2024-06-13T04:08:20.378Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e5fff53e-8b92-4dd4-834e-c9f5167111a5"} 2024-06-13T04:08:25.656Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "77326d32-bc73-4839-8732-7d90d4285911"} 2024-06-13T04:08:31.272Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "12dc77f1-1890-41fb-87b6-f8a5261dd00b"} 2024-06-13T04:08:36.429Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "7ef9782b-87bc-4801-b7b7-8967b19bc987"} 2024-06-13T04:08:41.826Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "02edeaa7-6017-4e8a-b178-be66fdb69d47"} 2024-06-13T04:08:47.170Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "9d4ae0a7-bfdd-4660-9b25-ee00e777fc28"} 2024-06-13T04:08:52.847Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "a0f9cc31-5531-489a-a8da-c9e88d18b5b8"} 2024-06-13T04:08:58.236Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "84d4352d-b4f0-4d41-8ab4-b897d3f13df4"} 2024-06-13T04:09:03.325Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "cc567688-eb69-4840-8329-34c4433d48fe"} 2024-06-13T04:09:08.671Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "0fb49778-0e80-41af-aebe-3983838ff057"} 2024-06-13T04:09:14.835Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": 
"0055d693-ff27-4de4-b727-a94e85fc0e6e"} 2024-06-13T04:09:20.350Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "83a963e7-53cd-4e07-bbdf-d636f8d40c8b"} 2024-06-13T04:09:22.273Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "3ba983c9-514d-462d-9a63-8ff4c2f114ee", "user": "root"} 2024-06-13T04:09:22.321Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "3ba983c9-514d-462d-9a63-8ff4c2f114ee", "user": "root"} 2024-06-13T04:09:22.329Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "3ba983c9-514d-462d-9a63-8ff4c2f114ee", "secret": "some-name-mysql-init", "user": "root"} 2024-06-13T04:09:28.037Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "3ba983c9-514d-462d-9a63-8ff4c2f114ee"} 2024-06-13T04:09:28.062Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "3ba983c9-514d-462d-9a63-8ff4c2f114ee", "user": "root"} 2024-06-13T04:09:28.123Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "3ba983c9-514d-462d-9a63-8ff4c2f114ee", "user": "root"} 2024-06-13T04:09:31.640Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "3ba983c9-514d-462d-9a63-8ff4c2f114ee"} 2024-06-13T04:09:37.367Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "549ece74-4513-4e63-a50b-d1697401dd76"} 2024-06-13T04:09:43.259Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "beaca6d3-97b0-4b11-a5eb-47b5948e2c87"} 2024-06-13T04:10:04.541Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e09dd908-4564-4e74-a611-dbe1c6741150", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-13T04:10:05.525Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e5c71ea3-8650-43b4-8bdb-baad971516dc", "user": "proxyadmin"} 2024-06-13T04:10:05.525Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": 
"e5c71ea3-8650-43b4-8bdb-baad971516dc", "user": "proxyadmin"} 2024-06-13T04:10:05.594Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e5c71ea3-8650-43b4-8bdb-baad971516dc", "user": "proxyadmin"} 2024-06-13T04:10:05.603Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e5c71ea3-8650-43b4-8bdb-baad971516dc", "user": "proxyadmin"} 2024-06-13T04:10:05.603Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e5c71ea3-8650-43b4-8bdb-baad971516dc", "last-applied-secret": "1a55867d0f4fa0d0643653b2eb57b2f690315d8d843a10599c9839c2a779a95f"} 2024-06-13T04:10:05.784Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e5c71ea3-8650-43b4-8bdb-baad971516dc", "err": "get primary pxc pod: not found"} 2024-06-13T04:10:05.821Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e5c71ea3-8650-43b4-8bdb-baad971516dc", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-13T04:10:22.033Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "b215d7e9-a885-4a74-bb42-6a681709dac6", "err": "get primary pxc pod: not found"} 2024-06-13T04:10:22.284Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "70cebfc0-0152-4ab0-89dc-46de3f54aa17", "err": "get primary pxc pod: not found"} 2024-06-13T04:10:33.068Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "c8c20244-9eff-42e3-a843-3aec6a7a1662", "err": "get primary pxc pod: not found"} 2024-06-13T04:11:09.015Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "a9d23f41-721c-4a3d-93e2-8b47eeccb404"} 2024-06-13T04:11:19.631Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "4f10b908-88ba-4298-bcf0-49d7ed22fa3d"} 2024-06-13T04:11:24.159Z DEBUG PXC 
users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "69377044-bcd9-4610-9115-1a52c02165c9"} 2024-06-13T04:11:27.288Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "860c1efc-ddef-4acb-b5e2-1307d7de9744", "user": "xtrabackup"} 2024-06-13T04:11:27.319Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "860c1efc-ddef-4acb-b5e2-1307d7de9744", "user": "xtrabackup"} 2024-06-13T04:11:27.332Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "860c1efc-ddef-4acb-b5e2-1307d7de9744", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-13T04:11:27.349Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "860c1efc-ddef-4acb-b5e2-1307d7de9744", "user": "xtrabackup"} 2024-06-13T04:11:27.375Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "860c1efc-ddef-4acb-b5e2-1307d7de9744", "user": "xtrabackup"} 2024-06-13T04:11:27.389Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "860c1efc-ddef-4acb-b5e2-1307d7de9744", "last-applied-secret": "d11bcc70ca53ea410cd455e13d82e7225917b0861d9b677ed34c02c5f6fbaf1b"} 2024-06-13T04:11:32.898Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "860c1efc-ddef-4acb-b5e2-1307d7de9744"} 2024-06-13T04:11:33.917Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e9076bb0-1fb6-4449-9b37-4fd3c0bf11ca", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-2: dial tcp: lookup some-name-pxc-2.some-name-pxc.users-18742 on 10.17.224.10:53: no such host"} 2024-06-13T04:11:34.258Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "9b474a15-8660-47c5-8efc-b7b75acb1e67", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-2: dial tcp: lookup some-name-pxc-2.some-name-pxc.users-18742 on 10.17.224.10:53: no such host"} 2024-06-13T04:12:26.981Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "477a5dda-ab83-4626-922a-ab3c67fa4304", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-18742 on 10.17.224.10:53: no such host"} 2024-06-13T04:12:31.590Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "fe9c6a3f-64bf-4cd6-9182-60606f9dfb60", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-18742 on 10.17.224.10:53: no such host"} 2024-06-13T04:12:36.850Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "3c5bae87-0f84-4e17-a414-58fc44373011", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup 
some-name-pxc-1.some-name-pxc.users-18742 on 10.17.224.10:53: no such host"} 2024-06-13T04:13:29.403Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "d2b9ab41-152b-482d-950b-c5d0e146cdb9", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.17.208.59:33062: connect: connection refused"} 2024-06-13T04:13:40.016Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "3437d92b-ec0d-4b1e-aa0c-23b74507a3ea", "primary name": "some-name-pxc-0.some-name-pxc.users-18742.svc.cluster.local"} 2024-06-13T04:14:16.032Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "449e8ebe-0b7f-4b7b-8f9f-6241ec5ba9dc"} 2024-06-13T04:14:21.259Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "2e1ec744-c06e-4299-a8ae-7559db80298a"} 2024-06-13T04:14:26.727Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e21eaa7e-dcc5-4d97-a9d4-e326269e6023"} 2024-06-13T04:14:32.035Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "b5dd46c3-61a8-4073-acf8-969303fde46c"} 2024-06-13T04:14:37.071Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "20256979-7e47-43da-8d4d-0fd6bb27f783"} 2024-06-13T04:14:39.122Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "91500fbc-64ff-4076-9864-2ad8ab31a4a4", "user": "monitor"} 2024-06-13T04:14:39.153Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "91500fbc-64ff-4076-9864-2ad8ab31a4a4", "user": "monitor"} 2024-06-13T04:14:39.163Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "91500fbc-64ff-4076-9864-2ad8ab31a4a4", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-13T04:14:39.208Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "91500fbc-64ff-4076-9864-2ad8ab31a4a4", "user": "monitor"} 2024-06-13T04:14:39.221Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "91500fbc-64ff-4076-9864-2ad8ab31a4a4", "user": "monitor"} 2024-06-13T04:14:39.303Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "91500fbc-64ff-4076-9864-2ad8ab31a4a4", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-06-13T04:14:41.997Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "91500fbc-64ff-4076-9864-2ad8ab31a4a4", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / 
\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-13T04:15:16.000Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "51f00eb5-e083-42db-b16a-0dc14320c93d", "user": "monitor"} 2024-06-13T04:15:20.363Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "51f00eb5-e083-42db-b16a-0dc14320c93d"} 2024-06-13T04:15:32.513Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "18c45fa0-5c00-44b9-b8e3-96f833698e9e", "user": "monitor"} 2024-06-13T04:15:32.765Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "18c45fa0-5c00-44b9-b8e3-96f833698e9e", "user": "monitor"} 2024-06-13T04:15:32.792Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "18c45fa0-5c00-44b9-b8e3-96f833698e9e", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-06-13T04:15:36.282Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "18c45fa0-5c00-44b9-b8e3-96f833698e9e"} 2024-06-13T04:15:40.921Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "9c30f894-ba33-42fe-97bb-68e6a7401fec"} 2024-06-13T04:15:46.495Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "b94538e5-83da-465a-a871-94add1ace5f2"} 2024-06-13T04:15:51.787Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "63b9bb57-8c68-4898-8b28-3c017fe9993f"} 2024-06-13T04:15:57.437Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "c364379c-0c19-4a97-b875-32e31cd26ba7"} 2024-06-13T04:16:02.496Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "1f7b3a16-8a9f-4680-b3af-9a8ee71e98cb"} 2024-06-13T04:16:04.642Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "cae21fae-d883-4677-8e32-80a451b83c3f", "user": "operator"} 2024-06-13T04:16:04.673Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "cae21fae-d883-4677-8e32-80a451b83c3f", "user": "operator"} 2024-06-13T04:16:04.688Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "cae21fae-d883-4677-8e32-80a451b83c3f", "secret": "some-name-mysql-init", "user": "operator"} 
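The entries above show the operator's full rotation sequence for a system user: "Password changed, updating user" → "Password updated" → "MySQL init secret updated" → "Internal secrets updated" → "Old password discarded", followed by a pod restart keyed to a new last-applied-secret hash. On the harness side the whole sequence is triggered by a single secret patch, as with the patch_secret call for the monitor user earlier in this log (the values shown are the ones this run used):

newpass='test-password2'
encoded=$(echo -n "$newpass" | base64)    # dGVzdC1wYXNzd29yZDI=
kubectl patch secret my-cluster-secrets \
    -p="{\"data\":{\"monitor\": \"$encoded\"}}"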
2024-06-13T04:16:04.704Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "cae21fae-d883-4677-8e32-80a451b83c3f", "user": "operator"} 2024-06-13T04:16:04.763Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "cae21fae-d883-4677-8e32-80a451b83c3f", "user": "operator"} 2024-06-13T04:16:04.797Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "cae21fae-d883-4677-8e32-80a451b83c3f", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-06-13T04:16:06.139Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "cae21fae-d883-4677-8e32-80a451b83c3f", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18742.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18742.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18742.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18742.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18742.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18742.svc.' 
(using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-13T04:16:44.733Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "07b24a74-30ac-4eac-9e3c-a0b6ec5fd8f8"} 2024-06-13T04:16:53.214Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "d3824ef0-853e-4026-8dcd-b281c4325831"} 2024-06-13T04:16:57.936Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "98274cff-b11c-4130-8efd-02561385ab6a"} 2024-06-13T04:17:03.119Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "07bbb2ef-25bc-4ced-bfb4-0f5743109ced"} 2024-06-13T04:17:04.434Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "secrets": "my-cluster-secrets-2"} 2024-06-13T04:17:04.447Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "user": "root"} 2024-06-13T04:17:04.498Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "user": "root"} 2024-06-13T04:17:04.509Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "secret": "some-name-mysql-init", "user": "root"} 2024-06-13T04:17:09.643Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb"} 2024-06-13T04:17:09.656Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "user": "root"} 2024-06-13T04:17:09.704Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "user": "root"} 2024-06-13T04:17:09.715Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "user": "operator"} 2024-06-13T04:17:09.745Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "user": "operator"} 2024-06-13T04:17:09.754Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "secret": "some-name-mysql-init", "user": 
"operator"} 2024-06-13T04:17:09.763Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "user": "operator"} 2024-06-13T04:17:09.794Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "user": "operator"} 2024-06-13T04:17:09.810Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "user": "monitor"} 2024-06-13T04:17:09.840Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "user": "monitor"} 2024-06-13T04:17:09.850Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-13T04:17:09.893Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "user": "monitor"} 2024-06-13T04:17:09.902Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "user": "monitor"} 2024-06-13T04:17:09.990Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "user": "xtrabackup"} 2024-06-13T04:17:10.018Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "user": "xtrabackup"} 2024-06-13T04:17:10.029Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-13T04:17:10.038Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "user": "xtrabackup"} 2024-06-13T04:17:10.085Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "user": "xtrabackup"} 2024-06-13T04:17:10.096Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "user": "replication"} 2024-06-13T04:17:10.127Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "user": "replication"} 2024-06-13T04:17:10.139Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "secret": "some-name-mysql-init", "user": "replication"} 2024-06-13T04:17:10.147Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "user": "replication"} 2024-06-13T04:17:10.176Z INFO Old 
password discarded {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "user": "replication"} 2024-06-13T04:17:10.176Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "user": "proxyadmin"} 2024-06-13T04:17:10.220Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "user": "proxyadmin"} 2024-06-13T04:17:10.233Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "user": "proxyadmin"} 2024-06-13T04:17:10.233Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "last-applied-secret": "6d7455b44712def24a30faa04b036897b06529db11e20795a832ce7a77258a10"} 2024-06-13T04:17:10.233Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "last-applied-secret": "6d7455b44712def24a30faa04b036897b06529db11e20795a832ce7a77258a10"} 2024-06-13T04:17:10.450Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "8ae7aeb9-aebe-4e10-8946-4f0abd4d87cb", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-13T04:17:58.397Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "27ffac64-aca5-4c29-ae6b-297f5094c5d2", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-18742 on 10.17.224.10:53: no such host"} 2024-06-13T04:18:03.413Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "02c69006-e4e7-4a2a-87d7-7346fcd47780", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-18742 on 10.17.224.10:53: no such host"} 2024-06-13T04:18:08.690Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "23f68682-71b4-4c3d-ba68-d57fa55d5928", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-18742 on 10.17.224.10:53: no such host"} 2024-06-13T04:18:56.614Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "841bbc86-1d60-4a0a-88ae-f47f4e484ae0", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.17.208.62:33062: connect: connection refused"} 2024-06-13T04:19:07.143Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "494d1841-35ba-46ef-a857-6b7ec9c69674", "primary name": "some-name-pxc-0.some-name-pxc.users-18742.svc.cluster.local"} 2024-06-13T04:19:17.646Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "f87195c9-ed79-4fc9-bfdc-fdbfc8cafc15", "primary name": "some-name-pxc-0.some-name-pxc.users-18742.svc.cluster.local"} 2024-06-13T04:19:22.854Z INFO Unable to find primary pod for replication. 
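The "Could not connect to ProxySQL at localhost:6032" message in the sync-users error above comes from a connectivity check against the ProxySQL admin port that runs before user sync; after the proxyadmin password changes, the old credentials stop working until the ProxySQL pods restart, so a transient access-denied here is expected during rotation. A rough reachability probe of that kind in Go (address, attempt count, and backoff are assumptions, not the operator's actual check):

// proxysqlcheck.go - minimal TCP probe of the ProxySQL admin interface.
package main

import (
	"fmt"
	"net"
	"time"
)

func main() {
	addr := "127.0.0.1:6032" // ProxySQL admin port; address is an assumption
	for attempt := 1; attempt <= 5; attempt++ {
		conn, err := net.DialTimeout("tcp", addr, 2*time.Second)
		if err == nil {
			conn.Close()
			fmt.Println("ProxySQL admin port reachable")
			return
		}
		fmt.Printf("attempt %d: %v\n", attempt, err)
		time.Sleep(time.Duration(attempt) * time.Second) // linear backoff
	}
	fmt.Println("ProxySQL connection check failed")
}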
2024-06-13T04:19:33.584Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "56e834f7-f800-463f-a580-1052f879e30c", "user": "monitor"}
2024-06-13T04:19:33.817Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "56e834f7-f800-463f-a580-1052f879e30c", "user": "monitor"}
2024-06-13T04:19:33.838Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "56e834f7-f800-463f-a580-1052f879e30c", "last-applied-secret": "6d7455b44712def24a30faa04b036897b06529db11e20795a832ce7a77258a10"}
2024-06-13T04:19:37.117Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "56e834f7-f800-463f-a580-1052f879e30c"}
2024-06-13T04:19:41.996Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "41a1c2be-7d83-4931-8070-e1f6a940573c"}
2024-06-13T04:19:47.232Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "3270d9ad-f749-44aa-b805-0592024fe188"}
2024-06-13T04:19:52.714Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "a6edcced-bd16-4f66-b1ef-85fdc521dea5"}
2024-06-13T04:19:54.712Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "040781f5-f23c-4d18-8901-531d10a51079", "user": "operator"}
2024-06-13T04:19:54.741Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "040781f5-f23c-4d18-8901-531d10a51079", "user": "operator"}
2024-06-13T04:19:54.754Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "040781f5-f23c-4d18-8901-531d10a51079", "secret": "some-name-mysql-init", "user": "operator"}
2024-06-13T04:19:54.762Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "040781f5-f23c-4d18-8901-531d10a51079", "user": "operator"}
2024-06-13T04:19:54.794Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "040781f5-f23c-4d18-8901-531d10a51079", "user": "operator"}
2024-06-13T04:19:54.828Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "040781f5-f23c-4d18-8901-531d10a51079", "last-applied-secret": "827660f88a88d0ebccbed7990a39e4efa08a5dcbbb98bf4eefd71eea00651669"}
2024-06-13T04:19:56.292Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "040781f5-f23c-4d18-8901-531d10a51079", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18742.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18742.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18742.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18742.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18742.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18742.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-06-13T04:20:22.060Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "994d9e2a-33b5-43a4-9b76-8ca5779ca946"}
2024-06-13T04:20:30.532Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "9c2de5c8-b68d-48a4-9b2c-800619b379e5"}
2024-06-13T04:20:35.816Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "67e623bb-f9cf-41ee-bd38-0ac03e3bb0ec"}
2024-06-13T04:20:41.285Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "88f4ea27-0ab7-4edc-9ebc-90b398f1efbb"}
2024-06-13T04:20:46.617Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "cc7b2fb9-f2b3-43de-b869-5f01107519dd"}
2024-06-13T04:20:52.139Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "d1ef523e-5970-426d-a820-a7d7fe5a53ca"}
2024-06-13T04:20:58.013Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "a1bf9a44-2e29-4547-8e3c-c3c0c130526c"}
2024-06-13T04:21:02.807Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "ee5174b3-6306-4862-b668-4dd45378ae78"}
2024-06-13T04:21:09.560Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "36e966e1-4410-456d-aa7a-4b254458b5c4"}
2024-06-13T04:21:13.437Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "d780df65-2a80-4319-b33a-4b05ce80c741"}
2024-06-13T04:21:18.719Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "7cb8ed3c-927c-4576-af13-21a1d4253513"}
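The "PXC users synced with ProxySQL" DEBUG entries above arrive roughly every five seconds, which is consistent with the periodic resync loop the stack traces attribute to resyncPXCUsersWithProxySQL. A bare sketch of such a ticker-driven loop in Go; the interval, the placeholder sync body, and the overall timeout are assumptions, not the operator's implementation:

// resync.go - sketch of a fixed-interval user-resync loop.
package main

import (
	"context"
	"log"
	"time"
)

// syncUsers stands in for the real ProxySQL user sync; it always succeeds here.
func syncUsers(ctx context.Context) error {
	return nil
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 16*time.Second)
	defer cancel()

	ticker := time.NewTicker(5 * time.Second) // assumed interval
	defer ticker.Stop()

	for {
		select {
		case <-ctx.Done():
			return
		case <-ticker.C:
			if err := syncUsers(ctx); err != nil {
				log.Printf("sync users: %v", err) // mirrors the ERROR entries above
				continue
			}
			log.Print("PXC users synced with ProxySQL")
		}
	}
}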
"namespace": "users-18742", "name": "some-name", "reconcileID": "7cb8ed3c-927c-4576-af13-21a1d4253513"} 2024-06-13T04:21:24.127Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "f9e73e0e-e34d-41e9-9dfd-25052b6dcbca"} 2024-06-13T04:21:29.708Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "f4e95780-f0d9-4f35-a989-8223978ab688"} 2024-06-13T04:21:35.082Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "7039f914-e31e-45a7-a9af-ab89aa1580b2"} 2024-06-13T04:21:40.484Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "904ba311-2360-4041-a049-c47cd4fdbf83"} 2024-06-13T04:21:45.905Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "7b7f6283-549f-4838-bb5e-e7b86be7d9bd"} 2024-06-13T04:21:51.205Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "168ff1b3-7c62-46d5-8020-376bd53fc20e"} 2024-06-13T04:21:53.144Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "06e98b4a-81e3-4b60-ba08-2e16f7d61ea1", "user": "root"} 2024-06-13T04:21:53.198Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "06e98b4a-81e3-4b60-ba08-2e16f7d61ea1", "user": "root"} 2024-06-13T04:21:53.210Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "06e98b4a-81e3-4b60-ba08-2e16f7d61ea1", "secret": "some-name-mysql-init", "user": "root"} 2024-06-13T04:21:59.041Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "06e98b4a-81e3-4b60-ba08-2e16f7d61ea1"} 2024-06-13T04:21:59.050Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "06e98b4a-81e3-4b60-ba08-2e16f7d61ea1", "user": "root"} 2024-06-13T04:21:59.098Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "06e98b4a-81e3-4b60-ba08-2e16f7d61ea1", "user": "root"} 2024-06-13T04:21:59.123Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "06e98b4a-81e3-4b60-ba08-2e16f7d61ea1", "user": "monitor"} 2024-06-13T04:21:59.155Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "06e98b4a-81e3-4b60-ba08-2e16f7d61ea1", "user": "monitor"} 2024-06-13T04:21:59.166Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "06e98b4a-81e3-4b60-ba08-2e16f7d61ea1", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-13T04:21:59.210Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "06e98b4a-81e3-4b60-ba08-2e16f7d61ea1", "user": "monitor"} 2024-06-13T04:21:59.221Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": 
"users-18742", "name": "some-name", "reconcileID": "06e98b4a-81e3-4b60-ba08-2e16f7d61ea1", "user": "monitor"} 2024-06-13T04:21:59.324Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "06e98b4a-81e3-4b60-ba08-2e16f7d61ea1", "user": "xtrabackup"} 2024-06-13T04:21:59.352Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "06e98b4a-81e3-4b60-ba08-2e16f7d61ea1", "user": "xtrabackup"} 2024-06-13T04:21:59.361Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "06e98b4a-81e3-4b60-ba08-2e16f7d61ea1", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-13T04:21:59.369Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "06e98b4a-81e3-4b60-ba08-2e16f7d61ea1", "user": "xtrabackup"} 2024-06-13T04:21:59.398Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "06e98b4a-81e3-4b60-ba08-2e16f7d61ea1", "user": "xtrabackup"} 2024-06-13T04:21:59.409Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "06e98b4a-81e3-4b60-ba08-2e16f7d61ea1", "user": "proxyadmin"} 2024-06-13T04:21:59.453Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "06e98b4a-81e3-4b60-ba08-2e16f7d61ea1", "user": "proxyadmin"} 2024-06-13T04:21:59.463Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "06e98b4a-81e3-4b60-ba08-2e16f7d61ea1", "user": "proxyadmin"} 2024-06-13T04:21:59.463Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "06e98b4a-81e3-4b60-ba08-2e16f7d61ea1", "last-applied-secret": "e76269a0fdb6b5d2e3290e2a4cf7083baeed3214986968ce01108bcc1e3c9de6"} 2024-06-13T04:21:59.463Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "06e98b4a-81e3-4b60-ba08-2e16f7d61ea1", "last-applied-secret": "e76269a0fdb6b5d2e3290e2a4cf7083baeed3214986968ce01108bcc1e3c9de6"} 2024-06-13T04:21:59.729Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "06e98b4a-81e3-4b60-ba08-2e16f7d61ea1", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-13T04:22:57.963Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "f3d458e2-f83e-48df-b25a-1a3a3a8bc2b7", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-18742 on 10.17.224.10:53: no such host"} 2024-06-13T04:23:02.909Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "3452103e-3bb8-4f28-9836-752692ab7223", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-18742 on 10.17.224.10:53: no such host"} 2024-06-13T04:23:08.161Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "3a6448ab-de84-4672-90d9-dbfc302f8ba4", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-18742 on 10.17.224.10:53: no such host"} 2024-06-13T04:24:06.231Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "f9974052-f598-4bcb-96d2-453b8605da17", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-18742 on 10.17.224.10:53: no such host"} 2024-06-13T04:24:11.737Z INFO Unable to find primary pod for replication. 
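The "no such host" lookups above are what a resolver returns while a StatefulSet pod's headless-service DNS record is absent, which is expected while the pods are being recreated after the secret change. A small Go sketch of such a lookup that treats not-found as transient; the hostname is taken from the log and only resolves inside the cluster:

// dnsprobe.go - distinguish a missing pod DNS record from other failures.
package main

import (
	"errors"
	"fmt"
	"net"
)

func main() {
	host := "some-name-pxc-1.some-name-pxc.users-18742" // pod DNS name from the log
	addrs, err := net.LookupHost(host)
	if err != nil {
		var dnsErr *net.DNSError
		if errors.As(err, &dnsErr) && dnsErr.IsNotFound {
			// The record is not published yet; a controller would retry.
			fmt.Println("pod DNS record not found; treat as transient")
			return
		}
		fmt.Println("lookup failed:", err)
		return
	}
	fmt.Println("resolved:", addrs)
}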
2024-06-13T04:24:25.012Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: b3b45874-c44e-4903-a7da-dce2bdaf9917
2024-06-13T04:24:28.296Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "7e230583-8a59-4761-9662-114fcfe9106b", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 10.17.234.170:3306: connect: connection refused"}
2024-06-13T04:25:41.993Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "221e097e-d127-46ac-a7c1-7165ca86f574", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.17.209.55:33062: connect: connection refused"}
2024-06-13T04:27:07.721Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e6694c9a-1ec1-4523-8b46-f75cadf7b809", "user": "root"}
2024-06-13T04:27:07.771Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e6694c9a-1ec1-4523-8b46-f75cadf7b809", "user": "root"}
2024-06-13T04:27:07.798Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e6694c9a-1ec1-4523-8b46-f75cadf7b809", "secret": "some-name-mysql-init", "user": "root"}
2024-06-13T04:27:07.818Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e6694c9a-1ec1-4523-8b46-f75cadf7b809", "user": "root"}
2024-06-13T04:27:07.868Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e6694c9a-1ec1-4523-8b46-f75cadf7b809", "user": "root"}
2024-06-13T04:27:07.878Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e6694c9a-1ec1-4523-8b46-f75cadf7b809", "user": "operator"}
2024-06-13T04:27:07.905Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e6694c9a-1ec1-4523-8b46-f75cadf7b809", "user": "operator"}
2024-06-13T04:27:07.915Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e6694c9a-1ec1-4523-8b46-f75cadf7b809", "secret": "some-name-mysql-init", "user": "operator"}
2024-06-13T04:27:07.929Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e6694c9a-1ec1-4523-8b46-f75cadf7b809", "user": "operator"}
2024-06-13T04:27:07.958Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e6694c9a-1ec1-4523-8b46-f75cadf7b809", "user": "operator"}
2024-06-13T04:27:07.967Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e6694c9a-1ec1-4523-8b46-f75cadf7b809", "user": "monitor"}
2024-06-13T04:27:07.996Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e6694c9a-1ec1-4523-8b46-f75cadf7b809", "user": "monitor"}
2024-06-13T04:27:08.004Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e6694c9a-1ec1-4523-8b46-f75cadf7b809", "secret": "some-name-mysql-init", "user": "monitor"}
2024-06-13T04:27:08.016Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e6694c9a-1ec1-4523-8b46-f75cadf7b809", "user": "monitor"}
2024-06-13T04:27:08.115Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e6694c9a-1ec1-4523-8b46-f75cadf7b809", "user": "xtrabackup"}
2024-06-13T04:27:08.146Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e6694c9a-1ec1-4523-8b46-f75cadf7b809", "user": "xtrabackup"}
2024-06-13T04:27:08.157Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e6694c9a-1ec1-4523-8b46-f75cadf7b809", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2024-06-13T04:27:08.169Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e6694c9a-1ec1-4523-8b46-f75cadf7b809", "user": "xtrabackup"}
2024-06-13T04:27:08.196Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e6694c9a-1ec1-4523-8b46-f75cadf7b809", "user": "xtrabackup"}
2024-06-13T04:27:08.206Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e6694c9a-1ec1-4523-8b46-f75cadf7b809", "user": "replication"}
2024-06-13T04:27:08.239Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e6694c9a-1ec1-4523-8b46-f75cadf7b809", "user": "replication"}
2024-06-13T04:27:08.247Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e6694c9a-1ec1-4523-8b46-f75cadf7b809", "secret": "some-name-mysql-init", "user": "replication"}
2024-06-13T04:27:08.258Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e6694c9a-1ec1-4523-8b46-f75cadf7b809", "user": "replication"}
2024-06-13T04:27:08.286Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e6694c9a-1ec1-4523-8b46-f75cadf7b809", "user": "replication"}
2024-06-13T04:27:08.286Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e6694c9a-1ec1-4523-8b46-f75cadf7b809", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"}
2024-06-13T04:27:08.286Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e6694c9a-1ec1-4523-8b46-f75cadf7b809", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"}
2024-06-13T04:29:24.229Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "84770395-f3b0-4c6b-8551-792a787e2b19", "user": "monitor"}
2024-06-13T04:29:24.475Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "84770395-f3b0-4c6b-8551-792a787e2b19", "user": "monitor"}
2024-06-13T04:29:24.494Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "84770395-f3b0-4c6b-8551-792a787e2b19", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"}
2024-06-13T04:29:45.157Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e7d341a1-7068-4378-aa5a-7084a003706d", "user": "monitor"}
2024-06-13T04:29:45.184Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e7d341a1-7068-4378-aa5a-7084a003706d", "user": "monitor"}
2024-06-13T04:29:45.196Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e7d341a1-7068-4378-aa5a-7084a003706d", "secret": "some-name-mysql-init", "user": "monitor"}
2024-06-13T04:29:45.205Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e7d341a1-7068-4378-aa5a-7084a003706d", "user": "monitor"}
2024-06-13T04:29:45.312Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "e7d341a1-7068-4378-aa5a-7084a003706d", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"}
2024-06-13T04:30:50.991Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "84bb3f20-4627-47bc-8d74-6630e8c04314", "user": "monitor"}
2024-06-13T04:30:51.253Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "84bb3f20-4627-47bc-8d74-6630e8c04314", "user": "monitor"}
2024-06-13T04:30:51.273Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18742", "name": "some-name", "reconcileID": "84bb3f20-4627-47bc-8d74-6630e8c04314", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"}
github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1
/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:222
/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:261
/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:324
/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1248
[mysql] 2024/06/13 04:13:29 packets.go:37: read tcp 10.17.208.57:47580->10.17.208.59:33062: read: connection reset by peer
[mysql] 2024/06/13 04:26:40 packets.go:37: unexpected EOF
[mysql] 2024/06/13 04:29:08 packets.go:37: read tcp 10.17.208.57:40456->10.17.234.170:3306: i/o timeout
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2
+ kubectl get pxc --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl patch pxc -n users-18742 some-name --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.vsAhiDbxlB
++ mktemp
+ local LAST_ERR=/tmp/tmp.DMvzy6XjhZ
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.vsAhiDbxlB
perconaxtradbcluster.pxc.percona.com "some-name" deleted
+ cat /tmp/tmp.DMvzy6XjhZ
+ rm /tmp/tmp.vsAhiDbxlB /tmp/tmp.DMvzy6XjhZ
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.abvmub4xXw
++ mktemp
+ local LAST_ERR=/tmp/tmp.sQDh89qmg2
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.abvmub4xXw
No resources found
+ cat /tmp/tmp.sQDh89qmg2
+ rm /tmp/tmp.abvmub4xXw /tmp/tmp.sQDh89qmg2
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.yB87QGtrH5
++ mktemp
+ local LAST_ERR=/tmp/tmp.huHrCgBMqn
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.yB87QGtrH5
No resources found
+ cat /tmp/tmp.huHrCgBMqn
+ rm /tmp/tmp.yB87QGtrH5 /tmp/tmp.huHrCgBMqn
+ return 0
+ kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
++ mktemp
+ local LAST_OUT=/tmp/tmp.iBvZuBbVOd
++ mktemp
+ local LAST_ERR=/tmp/tmp.GHQlYSTU8h
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.iBvZuBbVOd
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted
+ cat /tmp/tmp.GHQlYSTU8h
+ rm /tmp/tmp.iBvZuBbVOd /tmp/tmp.GHQlYSTU8h
+ return 0
+ kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml
+ :
+ '[' '!' -z '' ']'
+ '[' -n pxc-operator ']'
+ kubectl_bin delete --grace-period=0 --force=true namespace users-18742
+ rm -rf /tmp/tmp.ShPTGmQQOH
+ kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator
++ mktemp
++ mktemp
+ local LAST_OUT=/tmp/tmp.9swajB1L31
+ desc 'test passed'
+ set +o xtrace
-----------------------------------------------------------------------------------
test passed
++ mktemp
-----------------------------------------------------------------------------------
+ local LAST_OUT=/tmp/tmp.C4bmTW0lzl
++ mktemp
+ local LAST_ERR=/tmp/tmp.2nILoDAg7A
+ local exit_status=0
++ seq 0 2
+ local LAST_ERR=/tmp/tmp.EKxk81NOQs
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace users-18742
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace pxc-operator
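The kubectl_bin trace above shows the harness's retry wrapper: up to three attempts (`seq 0 2`), stdout and stderr captured to mktemp files, and a break as soon as the command exits zero. A loose Go translation of that same pattern; the command, arguments, and temp-file handling are illustrative, not the test framework's code:

// retrycmd.go - retry an external command, capturing output per attempt.
package main

import (
	"fmt"
	"os"
	"os/exec"
)

func main() {
	for i := 0; i <= 2; i++ { // mirrors `seq 0 2`
		out, err := os.CreateTemp("", "last_out")
		if err != nil {
			fmt.Println(err)
			os.Exit(1)
		}
		errf, err := os.CreateTemp("", "last_err")
		if err != nil {
			fmt.Println(err)
			os.Exit(1)
		}
		cmd := exec.Command("kubectl", "delete", "pxc", "--all", "--all-namespaces")
		cmd.Stdout, cmd.Stderr = out, errf
		runErr := cmd.Run()
		out.Close()
		errf.Close()
		if runErr == nil {
			return // success: the shell loop `break`s here
		}
		fmt.Printf("attempt %d failed: %v\n", i, runErr)
	}
	os.Exit(1)
}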