Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/logs/users-8-0.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + create_infra users-13843 + local ns=users-13843 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-6785 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.ZiEyE95Sba ++ mktemp + local LAST_ERR=/tmp/tmp.0s7TyhgJ2Z + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ZiEyE95Sba perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.0s7TyhgJ2Z + rm /tmp/tmp.ZiEyE95Sba /tmp/tmp.0s7TyhgJ2Z + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.M0NtTdxzuX ++ mktemp + local LAST_ERR=/tmp/tmp.io5IlxngW2 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.M0NtTdxzuX No resources found + cat /tmp/tmp.io5IlxngW2 + rm /tmp/tmp.M0NtTdxzuX /tmp/tmp.io5IlxngW2 + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.5tzm1hFwmT ++ mktemp + local LAST_ERR=/tmp/tmp.DqDqi1ejGJ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5tzm1hFwmT No resources found + cat /tmp/tmp.DqDqi1ejGJ + rm /tmp/tmp.5tzm1hFwmT /tmp/tmp.DqDqi1ejGJ + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ sed s/NAMESPACE// ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl get crd ++ awk '{print $1}' ++ grep chaos-mesh.org + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get clusterrolebinding + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified 
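Nearly every kubectl call in this log runs through the test framework's kubectl_bin wrapper, which is what produces the repeating mktemp / LAST_OUT / LAST_ERR / "seq 0 2" pattern above: stdout and stderr are captured into temp files, the command is retried up to three times, and the captured output is echoed back into the log and cleaned up afterwards. A minimal sketch of that pattern, reconstructed from the trace rather than copied from the framework (internal details are assumptions):

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do                        # up to three attempts, as seen in the trace
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"    # capture stdout and stderr separately
        exit_status=$?
        set -e
        if [ "$exit_status" != 0 ]; then
            sleep 0                                # the trace shows "sleep 0" between failed attempts
        else
            break
        fi
    done
    cat "$LAST_OUT"                                # replay captured output into the log
    cat "$LAST_ERR"
    rm -f "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}

Callers either check the return code or deliberately discard a failure with a "|| :" no-op, which is why some deletes of objects that do not exist yet are allowed to fail all three attempts (return 1) and the test simply moves on.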
+ : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator++ mktemp ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator ++ mktemp + xargs kubectl delete ns + local LAST_OUT=/tmp/tmp.3p83LSykRO + local LAST_OUT=/tmp/tmp.duOMRi8dyK ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.eZ1jcPIQSB + local exit_status=0 + local LAST_ERR=/tmp/tmp.5QL1pwaKwk + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + awk '{print$1}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.duOMRi8dyK + cat /tmp/tmp.5QL1pwaKwk + rm /tmp/tmp.duOMRi8dyK /tmp/tmp.5QL1pwaKwk + return 0 namespace "users-6785" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3p83LSykRO namespace "pxc-operator" deleted + cat /tmp/tmp.eZ1jcPIQSB + rm /tmp/tmp.3p83LSykRO /tmp/tmp.eZ1jcPIQSB + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.xWrmcvb1mw ++ mktemp + local LAST_ERR=/tmp/tmp.JGfjKnfH57 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xWrmcvb1mw namespace/pxc-operator created + cat /tmp/tmp.JGfjKnfH57 + rm /tmp/tmp.xWrmcvb1mw /tmp/tmp.JGfjKnfH57 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.OluNAAwclY +++ mktemp ++ local LAST_ERR=/tmp/tmp.wHE4WolSLG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OluNAAwclY ++ cat /tmp/tmp.wHE4WolSLG ++ rm /tmp/tmp.OluNAAwclY /tmp/tmp.wHE4WolSLG ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1752-44f0e1a8-4-cluster6 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.Zf3jJbpNFh ++ mktemp + local LAST_ERR=/tmp/tmp.jXSzZmEocx + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1752-44f0e1a8-4-cluster6 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Zf3jJbpNFh Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1752-44f0e1a8-4-cluster6" modified. 
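The namespace cleanup above boils down to three steps: sweep every leftover test namespace, wait until the operator namespace is really gone, then recreate it and point the current kubectl context at it. Note that the sweep still tries to delete the default namespace and is refused by the API server, because '^default$' is matched against the whole "kubectl get ns" line rather than the bare name; the error is simply ignored. A condensed sketch of the visible commands, with the retry wrapper and the wait helper replaced by plain kubectl calls and a polling loop (the --ignore-not-found and --no-run-if-empty flags are conveniences added here, not present in the original trace):

# Sweep leftover namespaces, keeping system namespaces, the operator namespace and anything terminating.
kubectl get ns \
    | egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' \
    | awk '{print $1}' \
    | xargs --no-run-if-empty kubectl delete ns

# Recreate the operator namespace and make it the default for this context.
kubectl delete namespace pxc-operator --ignore-not-found
while kubectl get namespace pxc-operator >/dev/null 2>&1; do sleep 1; done
kubectl create namespace pxc-operator
kubectl config set-context "$(kubectl config current-context)" --namespace=pxc-operator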
+ cat /tmp/tmp.jXSzZmEocx + rm /tmp/tmp.Zf3jJbpNFh /tmp/tmp.jXSzZmEocx + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.i7H34xTixx ++ mktemp + local LAST_ERR=/tmp/tmp.uhcQZNMO9f + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.i7H34xTixx customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.uhcQZNMO9f + rm /tmp/tmp.i7H34xTixx /tmp/tmp.uhcQZNMO9f + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/deploy/cw-rbac.yaml + kubectl_bin apply -f - + sed -e 's^namespace: .*^namespace: pxc-operator^' ++ mktemp + local LAST_OUT=/tmp/tmp.szkfbIPmX2 ++ mktemp + local LAST_ERR=/tmp/tmp.gruHn8zyR9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.szkfbIPmX2 clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.gruHn8zyR9 + rm /tmp/tmp.szkfbIPmX2 /tmp/tmp.gruHn8zyR9 + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/deploy/cw-operator.yaml + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1752-44f0e1a8^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + kubectl_bin apply -f - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - ++ mktemp + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + local LAST_OUT=/tmp/tmp.K6Xe9kcQaz ++ mktemp + local LAST_ERR=/tmp/tmp.iUM3Y4rg6O + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.K6Xe9kcQaz deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.iUM3Y4rg6O + rm /tmp/tmp.K6Xe9kcQaz /tmp/tmp.iUM3Y4rg6O + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.R9gEz1htNA ++ mktemp + local LAST_ERR=/tmp/tmp.GDfONC2a6L + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.R9gEz1htNA pod/percona-xtradb-cluster-operator-6dbfd9bb9d-cs2sq condition met + cat /tmp/tmp.GDfONC2a6L + rm /tmp/tmp.R9gEz1htNA /tmp/tmp.GDfONC2a6L + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.gUnU1xC9W6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Id4BnZNpgu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gUnU1xC9W6 ++ cat /tmp/tmp.Id4BnZNpgu ++ rm /tmp/tmp.gUnU1xC9W6 /tmp/tmp.Id4BnZNpgu ++ return 0 + wait_pod percona-xtradb-cluster-operator-6dbfd9bb9d-cs2sq 480 pxc-operator + local pod=percona-xtradb-cluster-operator-6dbfd9bb9d-cs2sq + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-6dbfd9bb9d-cs2sq ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-6dbfd9bb9d-cs2sq condition met percona-xtradb-cluster-operator-6dbfd9bb9d-cs2sq.Ok + sleep 3 + create_namespace users-13843 + local namespace=users-13843 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v 
'^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces users-13843' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-13843 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-13843 ++ mktemp + kubectl_bin get ns ++ mktemp + xargs kubectl delete ns + local LAST_OUT=/tmp/tmp.LZTFp47Aiq ++ mktemp + local LAST_OUT=/tmp/tmp.n4rU37KBtj + local LAST_ERR=/tmp/tmp.55OappnZxF ++ mktemp + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.h8hzdVfSnx + local exit_status=0 + for i in '$(seq 0 2)' + set +e + kubectl get ns ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-13843 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LZTFp47Aiq + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-13843 + cat /tmp/tmp.55OappnZxF + rm /tmp/tmp.LZTFp47Aiq /tmp/tmp.55OappnZxF + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-13843 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.n4rU37KBtj + cat /tmp/tmp.h8hzdVfSnx Error from server (NotFound): namespaces "users-13843" not found + rm /tmp/tmp.n4rU37KBtj /tmp/tmp.h8hzdVfSnx + return 1 + : + wait_for_delete namespace/users-13843 + local res=namespace/users-13843 + echo -n 'namespace/users-13843 - ' namespace/users-13843 - + set +o xtrace Error from server (NotFound): namespaces "users-13843" not found + desc 'create namespace users-13843' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-13843 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-13843 ++ mktemp + local LAST_OUT=/tmp/tmp.5k3XddPXLG ++ mktemp + local LAST_ERR=/tmp/tmp.mdNJScCMom + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-13843 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5k3XddPXLG namespace/users-13843 created + cat /tmp/tmp.mdNJScCMom + rm /tmp/tmp.5k3XddPXLG /tmp/tmp.mdNJScCMom + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.zVZx070HK6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.lCcvycbSdx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zVZx070HK6 ++ cat /tmp/tmp.lCcvycbSdx ++ rm /tmp/tmp.zVZx070HK6 /tmp/tmp.lCcvycbSdx ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1752-44f0e1a8-4-cluster6 --namespace=users-13843 ++ mktemp + local LAST_OUT=/tmp/tmp.GVThDYIwy2 ++ mktemp + local LAST_ERR=/tmp/tmp.heBphQOIGv + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1752-44f0e1a8-4-cluster6 --namespace=users-13843 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.GVThDYIwy2 Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1752-44f0e1a8-4-cluster6" modified. 
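wait_for_delete, used above for both pxc-operator and users-13843, simply polls the resource until the API server answers NotFound; the "namespace/users-13843 - " prefix and the final NotFound error in the log come from that loop. A minimal equivalent, assuming the shape suggested by the trace (the real helper also caps the number of attempts, which is not visible here):

# Poll until the named resource (e.g. namespace/users-13843) no longer exists.
wait_for_delete() {
    local res="$1"
    echo -n "$res - "
    while kubectl get "$res" >/dev/null 2>&1; do
        echo -n .
        sleep 1
    done
    echo
}

wait_for_delete namespace/users-13843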
+ cat /tmp/tmp.heBphQOIGv + rm /tmp/tmp.GVThDYIwy2 /tmp/tmp.heBphQOIGv + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.BRc0AS4ZzU ++ mktemp + local LAST_ERR=/tmp/tmp.Ep72RY5OH3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BRc0AS4ZzU secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.Ep72RY5OH3 + rm /tmp/tmp.BRc0AS4ZzU /tmp/tmp.Ep72RY5OH3 + return 0 + desc 'create PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.doXhGqB5X2 ++ mktemp + local LAST_ERR=/tmp/tmp.YKnRHcdhnC + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.doXhGqB5X2 secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.YKnRHcdhnC + rm /tmp/tmp.doXhGqB5X2 /tmp/tmp.YKnRHcdhnC + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.0he4C9NT3f + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' ++ mktemp + /usr/bin/sed -e 's#image:.*-backup$#image: 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1752-44f0e1a8#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-13843~ + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + local LAST_ERR=/tmp/tmp.iAHYnsx5uR + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0he4C9NT3f deployment.apps/pxc-client created + cat /tmp/tmp.iAHYnsx5uR + rm /tmp/tmp.0he4C9NT3f /tmp/tmp.iAHYnsx5uR + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1752-44f0e1a8#' + local LAST_OUT=/tmp/tmp.OK0iphKXgP + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-13843~ ++ mktemp + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_ERR=/tmp/tmp.VCS7RsgXu2 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.OK0iphKXgP perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.VCS7RsgXu2 + rm /tmp/tmp.OK0iphKXgP /tmp/tmp.VCS7RsgXu2 + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.VbGlZQDm3H ++++ mktemp +++ local LAST_ERR=/tmp/tmp.OIJTzTOTCx +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 
'!=' 0 ']' +++ break +++ cat /tmp/tmp.VbGlZQDm3H +++ cat /tmp/tmp.OIJTzTOTCx +++ rm /tmp/tmp.VbGlZQDm3H /tmp/tmp.OIJTzTOTCx +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.8i7YIGhyJt ++++ mktemp +++ local LAST_ERR=/tmp/tmp.NSnZEFqFid +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.8i7YIGhyJt +++ cat /tmp/tmp.NSnZEFqFid +++ rm /tmp/tmp.8i7YIGhyJt /tmp/tmp.NSnZEFqFid +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-13843 ++ mktemp + local LAST_OUT=/tmp/tmp.Arxp9JzAUc ++ mktemp + local LAST_ERR=/tmp/tmp.fvSADoAaID + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-13843 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-13843 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-13843 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.Arxp9JzAUc + cat /tmp/tmp.fvSADoAaID error: no matching resources found + rm /tmp/tmp.Arxp9JzAUc /tmp/tmp.fvSADoAaID + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met some-name-pxc-0.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= 
++ echo some-name-pxc-1 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yoTVgCKu9X +++ mktemp ++ local LAST_ERR=/tmp/tmp.UBUJfpr3WZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yoTVgCKu9X ++ cat /tmp/tmp.UBUJfpr3WZ ++ rm /tmp/tmp.yoTVgCKu9X /tmp/tmp.UBUJfpr3WZ ++ return 0 + client_pod=pxc-client-6644d8898f-4zb6x + wait_pod pxc-client-6644d8898f-4zb6x + local pod=pxc-client-6644d8898f-4zb6x + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4zb6x ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4zb6x condition met pxc-client-6644d8898f-4zb6x.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2rKWq0cOJW +++ mktemp ++ local LAST_ERR=/tmp/tmp.WVx2HoUbZd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2rKWq0cOJW ++ cat /tmp/tmp.WVx2HoUbZd ++ rm /tmp/tmp.2rKWq0cOJW /tmp/tmp.WVx2HoUbZd ++ return 0 + client_pod=pxc-client-6644d8898f-4zb6x + wait_pod pxc-client-6644d8898f-4zb6x + local pod=pxc-client-6644d8898f-4zb6x + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' ++ echo pxc-client-6644d8898f-4zb6x + local container= + set +o xtrace pod/pxc-client-6644d8898f-4zb6x condition met pxc-client-6644d8898f-4zb6x.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h 
some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WbZbrjqSAB +++ mktemp ++ local LAST_ERR=/tmp/tmp.E04tG9GGQe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WbZbrjqSAB ++ cat /tmp/tmp.E04tG9GGQe ++ rm /tmp/tmp.WbZbrjqSAB /tmp/tmp.E04tG9GGQe ++ return 0 + client_pod=pxc-client-6644d8898f-4zb6x + wait_pod pxc-client-6644d8898f-4zb6x + local pod=pxc-client-6644d8898f-4zb6x + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4zb6x ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4zb6x condition met pxc-client-6644d8898f-4zb6x.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.3wek4g2sOh/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-1.sql /tmp/tmp.3wek4g2sOh/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8eljNdU1Fx +++ mktemp ++ local LAST_ERR=/tmp/tmp.TA3taGLO49 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8eljNdU1Fx ++ cat /tmp/tmp.TA3taGLO49 ++ rm /tmp/tmp.8eljNdU1Fx /tmp/tmp.TA3taGLO49 ++ return 0 + client_pod=pxc-client-6644d8898f-4zb6x + wait_pod pxc-client-6644d8898f-4zb6x + local pod=pxc-client-6644d8898f-4zb6x + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4zb6x ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4zb6x condition met pxc-client-6644d8898f-4zb6x.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.3wek4g2sOh/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-1.sql /tmp/tmp.3wek4g2sOh/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jaChFTVudB +++ mktemp ++ local LAST_ERR=/tmp/tmp.WHHRE73Tki ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jaChFTVudB ++ cat /tmp/tmp.WHHRE73Tki ++ rm /tmp/tmp.jaChFTVudB /tmp/tmp.WHHRE73Tki ++ return 0 + client_pod=pxc-client-6644d8898f-4zb6x + wait_pod pxc-client-6644d8898f-4zb6x + local pod=pxc-client-6644d8898f-4zb6x + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4zb6x ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4zb6x condition met pxc-client-6644d8898f-4zb6x.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.3wek4g2sOh/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-1.sql /tmp/tmp.3wek4g2sOh/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FZMjN1rVTJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.0cro85aZqR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FZMjN1rVTJ ++ cat /tmp/tmp.0cro85aZqR Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.FZMjN1rVTJ /tmp/tmp.0cro85aZqR ++ return 0 + '[' '' ']' + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.zh3NznkAw5 ++ mktemp + local LAST_ERR=/tmp/tmp.D0WCyTQWMf + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zh3NznkAw5 secret/my-cluster-secrets patched + cat /tmp/tmp.D0WCyTQWMf + rm /tmp/tmp.zh3NznkAw5 /tmp/tmp.D0WCyTQWMf + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.K6v9OOFNwy +++ mktemp ++ local LAST_ERR=/tmp/tmp.rjxXtiUW7D ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.K6v9OOFNwy ++ cat /tmp/tmp.rjxXtiUW7D ++ rm /tmp/tmp.K6v9OOFNwy /tmp/tmp.rjxXtiUW7D ++ return 0 + client_pod=pxc-client-6644d8898f-4zb6x + wait_pod pxc-client-6644d8898f-4zb6x + local pod=pxc-client-6644d8898f-4zb6x + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4zb6x ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4zb6x condition met pxc-client-6644d8898f-4zb6x.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.3wek4g2sOh/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.3wek4g2sOh/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.zYxyFjrNMB ++ mktemp + local LAST_ERR=/tmp/tmp.mK58OGQrbN + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zYxyFjrNMB perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.mK58OGQrbN + rm /tmp/tmp.zYxyFjrNMB /tmp/tmp.mK58OGQrbN + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0j9PSGU56B +++ mktemp ++ local LAST_ERR=/tmp/tmp.rLFNibFkCO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0j9PSGU56B ++ cat /tmp/tmp.rLFNibFkCO ++ rm /tmp/tmp.0j9PSGU56B /tmp/tmp.rLFNibFkCO ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TTwB2wzKGq +++ mktemp ++ local LAST_ERR=/tmp/tmp.4xU2Pbqqlw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TTwB2wzKGq ++ cat /tmp/tmp.4xU2Pbqqlw ++ rm /tmp/tmp.TTwB2wzKGq /tmp/tmp.4xU2Pbqqlw ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.tPynoWT6eJ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.0na4job8Zr +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.tPynoWT6eJ +++++ cat /tmp/tmp.0na4job8Zr +++++ rm /tmp/tmp.tPynoWT6eJ /tmp/tmp.0na4job8Zr +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.nkLrTdhtBB ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.UKLjlbC0Yt +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.nkLrTdhtBB +++++ cat /tmp/tmp.UKLjlbC0Yt +++++ rm /tmp/tmp.nkLrTdhtBB /tmp/tmp.UKLjlbC0Yt +++++ return 0 ++++ [[ true == 
\t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.27iP2r3qdA +++ mktemp ++ local LAST_ERR=/tmp/tmp.QkX3G92zek ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.27iP2r3qdA ++ cat /tmp/tmp.QkX3G92zek ++ rm /tmp/tmp.27iP2r3qdA /tmp/tmp.QkX3G92zek ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ZC4soXS2kR ++ mktemp + local LAST_ERR=/tmp/tmp.mEGoXPntWd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ZC4soXS2kR secret/my-cluster-secrets patched + cat /tmp/tmp.mEGoXPntWd + rm /tmp/tmp.ZC4soXS2kR /tmp/tmp.mEGoXPntWd + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TfsCWIrteS +++ mktemp ++ local LAST_ERR=/tmp/tmp.GanMiMTjBP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TfsCWIrteS ++ cat /tmp/tmp.GanMiMTjBP ++ rm /tmp/tmp.TfsCWIrteS /tmp/tmp.GanMiMTjBP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4Hb2Wti9sV +++ mktemp ++ local LAST_ERR=/tmp/tmp.zh3WRJiQyY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4Hb2Wti9sV ++ cat /tmp/tmp.zh3WRJiQyY ++ rm /tmp/tmp.4Hb2Wti9sV /tmp/tmp.zh3WRJiQyY ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hv3B4hXzIY +++ mktemp ++ local LAST_ERR=/tmp/tmp.IuWpNxVLsn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hv3B4hXzIY ++ cat /tmp/tmp.IuWpNxVLsn ++ rm /tmp/tmp.hv3B4hXzIY /tmp/tmp.IuWpNxVLsn ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local 
LAST_OUT=/tmp/tmp.Jr1HlSPHPc ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Kq2PlZ0nna +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Jr1HlSPHPc +++++ cat /tmp/tmp.Kq2PlZ0nna +++++ rm /tmp/tmp.Jr1HlSPHPc /tmp/tmp.Kq2PlZ0nna +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.yzcldLsWt7 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.g6wvHPZlM2 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.yzcldLsWt7 +++++ cat /tmp/tmp.g6wvHPZlM2 +++++ rm /tmp/tmp.yzcldLsWt7 /tmp/tmp.g6wvHPZlM2 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FqSQoO1dNl +++ mktemp ++ local LAST_ERR=/tmp/tmp.BsQWkrKrAF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FqSQoO1dNl ++ cat /tmp/tmp.BsQWkrKrAF ++ rm /tmp/tmp.FqSQoO1dNl /tmp/tmp.BsQWkrKrAF ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.3wek4g2sOh/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-2.sql /tmp/tmp.3wek4g2sOh/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.3wek4g2sOh/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-2.sql /tmp/tmp.3wek4g2sOh/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.3wek4g2sOh/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-2.sql /tmp/tmp.3wek4g2sOh/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.DdwTvctoMQ ++ mktemp + local LAST_ERR=/tmp/tmp.uFswD7i72L + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.DdwTvctoMQ perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.uFswD7i72L + rm /tmp/tmp.DdwTvctoMQ /tmp/tmp.uFswD7i72L + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.0hLiAKjnRv ++ mktemp + local LAST_ERR=/tmp/tmp.ZVBXjXUxDg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0hLiAKjnRv secret/my-cluster-secrets patched + cat /tmp/tmp.ZVBXjXUxDg + rm /tmp/tmp.0hLiAKjnRv /tmp/tmp.ZVBXjXUxDg + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bgSkN7lIq5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.hEBt1GGIsg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bgSkN7lIq5 ++ cat /tmp/tmp.hEBt1GGIsg ++ rm /tmp/tmp.bgSkN7lIq5 /tmp/tmp.hEBt1GGIsg ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aykllqSEn6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.jSO6zYvMDK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aykllqSEn6 ++ cat /tmp/tmp.jSO6zYvMDK ++ rm /tmp/tmp.aykllqSEn6 /tmp/tmp.jSO6zYvMDK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1LqC2ilEf3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.z1KXIrllM3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 
'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1LqC2ilEf3 ++ cat /tmp/tmp.z1KXIrllM3 ++ rm /tmp/tmp.1LqC2ilEf3 /tmp/tmp.z1KXIrllM3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.p2avlbv1lp +++ mktemp ++ local LAST_ERR=/tmp/tmp.ThsSR5ptdq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.p2avlbv1lp ++ cat /tmp/tmp.ThsSR5ptdq ++ rm /tmp/tmp.p2avlbv1lp /tmp/tmp.ThsSR5ptdq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.g7CG5P0Rmu +++ mktemp ++ local LAST_ERR=/tmp/tmp.cvLdEJJjDj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.g7CG5P0Rmu ++ cat /tmp/tmp.cvLdEJJjDj ++ rm /tmp/tmp.g7CG5P0Rmu /tmp/tmp.cvLdEJJjDj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.utAgUkJp9C +++ mktemp ++ local LAST_ERR=/tmp/tmp.XpkD5vJUWF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.utAgUkJp9C ++ cat /tmp/tmp.XpkD5vJUWF ++ rm /tmp/tmp.utAgUkJp9C /tmp/tmp.XpkD5vJUWF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.g08wisChYm +++ mktemp ++ local LAST_ERR=/tmp/tmp.jPTfYy8Dzz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.g08wisChYm ++ cat /tmp/tmp.jPTfYy8Dzz ++ rm /tmp/tmp.g08wisChYm /tmp/tmp.jPTfYy8Dzz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BpFDYbYCwi +++ mktemp ++ local LAST_ERR=/tmp/tmp.hPKROYvoHB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BpFDYbYCwi ++ cat /tmp/tmp.hPKROYvoHB ++ rm /tmp/tmp.BpFDYbYCwi /tmp/tmp.hPKROYvoHB ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vZd0LkoVgC +++ mktemp ++ local LAST_ERR=/tmp/tmp.v6LUAEeOTB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e 
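Each "waiting for cluster readyness" message above is one pass of wait_cluster_consistency: the helper polls the custom resource until .status.state is ready and the ready replica counts match the requested PXC and proxy sizes, sleeping 20 seconds between checks and giving up after 36 iterations. A rough, hedged equivalent of that loop (this cluster runs ProxySQL, so the sketch reads .status.proxysql.ready directly; the real helper first detects whether HAProxy or ProxySQL is enabled):

wait_cluster_consistency() {
    local cluster_name="$1" cluster_size="$2" proxy_size="$3"
    local i=0 max=36 state pxc_ready proxy_ready
    sleep 7
    while true; do
        state=$(kubectl get pxc "$cluster_name" -o jsonpath='{.status.state}')
        pxc_ready=$(kubectl get pxc "$cluster_name" -o jsonpath='{.status.pxc.ready}')
        proxy_ready=$(kubectl get pxc "$cluster_name" -o jsonpath='{.status.proxysql.ready}')
        if [[ $state == "ready" && $pxc_ready == "$cluster_size" && $proxy_ready == "$proxy_size" ]]; then
            return 0
        fi
        echo 'waiting for cluster readyness'
        if [[ $i -ge $max ]]; then
            return 1                   # give up after roughly 12 minutes
        fi
        sleep 20
        let i+=1
    done
}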
++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vZd0LkoVgC ++ cat /tmp/tmp.v6LUAEeOTB ++ rm /tmp/tmp.vZd0LkoVgC /tmp/tmp.v6LUAEeOTB ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.PZYLwRGhw6 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.SpV1oH3qSw +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.PZYLwRGhw6 +++++ cat /tmp/tmp.SpV1oH3qSw +++++ rm /tmp/tmp.PZYLwRGhw6 /tmp/tmp.SpV1oH3qSw +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.kbaj363XXF ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.EPsUasJL8R +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.kbaj363XXF +++++ cat /tmp/tmp.EPsUasJL8R +++++ rm /tmp/tmp.kbaj363XXF /tmp/tmp.EPsUasJL8R +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BBhGlfQXpi +++ mktemp ++ local LAST_ERR=/tmp/tmp.8oHKAnqlwQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BBhGlfQXpi ++ cat /tmp/tmp.8oHKAnqlwQ ++ rm /tmp/tmp.BBhGlfQXpi /tmp/tmp.8oHKAnqlwQ ++ return 0 + [[ 2 == \2 ]] + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-3-80.sql ']' + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' 
-s /tmp/tmp.3wek4g2sOh/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-3.sql /tmp/tmp.3wek4g2sOh/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ENUlEhj1TM ++ mktemp + local LAST_ERR=/tmp/tmp.MvVYzRVZsk + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ENUlEhj1TM secret/my-cluster-secrets patched + cat /tmp/tmp.MvVYzRVZsk + rm /tmp/tmp.ENUlEhj1TM /tmp/tmp.MvVYzRVZsk + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.s4rgoa6nTN +++ mktemp ++ local LAST_ERR=/tmp/tmp.JEhWY8ruVo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.s4rgoa6nTN ++ cat /tmp/tmp.JEhWY8ruVo ++ rm /tmp/tmp.s4rgoa6nTN /tmp/tmp.JEhWY8ruVo ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + retry=0 + is_password_updated monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep additional_password + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RG9nZrmnHZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.DaXOxWDaax ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RG9nZrmnHZ ++ cat /tmp/tmp.DaXOxWDaax ++ rm /tmp/tmp.RG9nZrmnHZ /tmp/tmp.DaXOxWDaax ++ return 0 + client_pod=pxc-client-6644d8898f-4zb6x + wait_pod pxc-client-6644d8898f-4zb6x + local pod=pxc-client-6644d8898f-4zb6x + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4zb6x ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4zb6x condition met pxc-client-6644d8898f-4zb6x.Ok + set +o xtrace {"additional_password": "*C1F414D9BAF378B656A849B31F9F8AF3125F558B"} + retry=0 + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc 
-uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.h91xwQJobr +++ mktemp ++ local LAST_ERR=/tmp/tmp.wFTAs9R7fV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.h91xwQJobr ++ cat /tmp/tmp.wFTAs9R7fV ++ rm /tmp/tmp.h91xwQJobr /tmp/tmp.wFTAs9R7fV ++ return 0 + client_pod=pxc-client-6644d8898f-4zb6x + wait_pod pxc-client-6644d8898f-4zb6x + local pod=pxc-client-6644d8898f-4zb6x + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4zb6x ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4zb6x condition met pxc-client-6644d8898f-4zb6x.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 1 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NxYLtEvASy +++ mktemp ++ local LAST_ERR=/tmp/tmp.OZTPDgrszv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NxYLtEvASy ++ cat /tmp/tmp.OZTPDgrszv ++ rm /tmp/tmp.NxYLtEvASy /tmp/tmp.OZTPDgrszv ++ return 0 + client_pod=pxc-client-6644d8898f-4zb6x + wait_pod pxc-client-6644d8898f-4zb6x + local pod=pxc-client-6644d8898f-4zb6x + local max_retry=480 + local ns= ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo pxc-client-6644d8898f-4zb6x + local container= + set +o xtrace pod/pxc-client-6644d8898f-4zb6x condition met pxc-client-6644d8898f-4zb6x.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 2 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mKvhqZYMmb +++ mktemp ++ local LAST_ERR=/tmp/tmp.n7SdO1bkOE ++ local 
exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mKvhqZYMmb ++ cat /tmp/tmp.n7SdO1bkOE ++ rm /tmp/tmp.mKvhqZYMmb /tmp/tmp.n7SdO1bkOE ++ return 0 + client_pod=pxc-client-6644d8898f-4zb6x + wait_pod pxc-client-6644d8898f-4zb6x + local pod=pxc-client-6644d8898f-4zb6x + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4zb6x ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4zb6x condition met pxc-client-6644d8898f-4zb6x.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 3 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.A8ztDDsSV8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.JmDXF0zB4u ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.A8ztDDsSV8 ++ cat /tmp/tmp.JmDXF0zB4u ++ rm /tmp/tmp.A8ztDDsSV8 /tmp/tmp.JmDXF0zB4u ++ return 0 + client_pod=pxc-client-6644d8898f-4zb6x + wait_pod pxc-client-6644d8898f-4zb6x + local pod=pxc-client-6644d8898f-4zb6x + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4zb6x ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4zb6x condition met pxc-client-6644d8898f-4zb6x.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 4 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LM9ujiq007 +++ mktemp ++ local LAST_ERR=/tmp/tmp.JVgF44P4WV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LM9ujiq007 ++ cat /tmp/tmp.JVgF44P4WV ++ rm /tmp/tmp.LM9ujiq007 /tmp/tmp.JVgF44P4WV ++ return 0 + client_pod=pxc-client-6644d8898f-4zb6x + wait_pod pxc-client-6644d8898f-4zb6x + local pod=pxc-client-6644d8898f-4zb6x + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4zb6x ++ /usr/bin/sed -E 
's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4zb6x condition met pxc-client-6644d8898f-4zb6x.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 5 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZIGLPXI2ZU +++ mktemp ++ local LAST_ERR=/tmp/tmp.TGzQFRKxew ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZIGLPXI2ZU ++ cat /tmp/tmp.TGzQFRKxew ++ rm /tmp/tmp.ZIGLPXI2ZU /tmp/tmp.TGzQFRKxew ++ return 0 + client_pod=pxc-client-6644d8898f-4zb6x + wait_pod pxc-client-6644d8898f-4zb6x + local pod=pxc-client-6644d8898f-4zb6x + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4zb6x ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4zb6x condition met pxc-client-6644d8898f-4zb6x.Ok + set +o xtrace NULL + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3c8xlAny5i +++ mktemp ++ local LAST_ERR=/tmp/tmp.pouRBgCf67 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3c8xlAny5i ++ cat /tmp/tmp.pouRBgCf67 ++ rm /tmp/tmp.3c8xlAny5i /tmp/tmp.pouRBgCf67 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SaNOhOHEUT +++ mktemp ++ local LAST_ERR=/tmp/tmp.X8ZDrMDeoJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SaNOhOHEUT ++ cat /tmp/tmp.X8ZDrMDeoJ ++ rm /tmp/tmp.SaNOhOHEUT /tmp/tmp.X8ZDrMDeoJ ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.KnK1xpUU4n ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.VdAsMprTsb +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ 
exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.KnK1xpUU4n +++++ cat /tmp/tmp.VdAsMprTsb +++++ rm /tmp/tmp.KnK1xpUU4n /tmp/tmp.VdAsMprTsb +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.pMs1DsQsoC ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.M83Mxnfw3h +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.pMs1DsQsoC +++++ cat /tmp/tmp.M83Mxnfw3h +++++ rm /tmp/tmp.pMs1DsQsoC /tmp/tmp.M83Mxnfw3h +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7LLevbj0lI +++ mktemp ++ local LAST_ERR=/tmp/tmp.U5u4M0hG5A ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7LLevbj0lI ++ cat /tmp/tmp.U5u4M0hG5A ++ rm /tmp/tmp.7LLevbj0lI /tmp/tmp.U5u4M0hG5A ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IoLcKoJo7f +++ mktemp ++ local LAST_ERR=/tmp/tmp.QRpS4Dh9Jq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IoLcKoJo7f ++ cat /tmp/tmp.QRpS4Dh9Jq ++ rm /tmp/tmp.IoLcKoJo7f /tmp/tmp.QRpS4Dh9Jq ++ return 0 + client_pod=pxc-client-6644d8898f-4zb6x + wait_pod pxc-client-6644d8898f-4zb6x + local pod=pxc-client-6644d8898f-4zb6x + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4zb6x ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4zb6x condition met pxc-client-6644d8898f-4zb6x.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.3wek4g2sOh/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.3wek4g2sOh/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ebzK9Flidn ++ mktemp + local LAST_ERR=/tmp/tmp.mmILJupE3G + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ebzK9Flidn secret/my-cluster-secrets patched + cat /tmp/tmp.mmILJupE3G + rm /tmp/tmp.ebzK9Flidn /tmp/tmp.mmILJupE3G + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.stGwuDqunz +++ mktemp ++ local LAST_ERR=/tmp/tmp.7QnMxFLlg5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.stGwuDqunz ++ cat /tmp/tmp.7QnMxFLlg5 ++ rm /tmp/tmp.stGwuDqunz /tmp/tmp.7QnMxFLlg5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KLNGgkWg8b +++ mktemp ++ local LAST_ERR=/tmp/tmp.guIVZpW6Kx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KLNGgkWg8b ++ cat /tmp/tmp.guIVZpW6Kx ++ rm /tmp/tmp.KLNGgkWg8b /tmp/tmp.guIVZpW6Kx ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tPPsPCo1ZR +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ge6VqjBNfK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tPPsPCo1ZR ++ cat /tmp/tmp.Ge6VqjBNfK ++ rm /tmp/tmp.tPPsPCo1ZR /tmp/tmp.Ge6VqjBNfK ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.KCru6Ox8GR ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.WPlOKpadgW +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ 
exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.KCru6Ox8GR +++++ cat /tmp/tmp.WPlOKpadgW +++++ rm /tmp/tmp.KCru6Ox8GR /tmp/tmp.WPlOKpadgW +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Vaoy4m1VSD ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.4Uik28wfbt +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Vaoy4m1VSD +++++ cat /tmp/tmp.4Uik28wfbt +++++ rm /tmp/tmp.Vaoy4m1VSD /tmp/tmp.4Uik28wfbt +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OxWiGkuo0e +++ mktemp ++ local LAST_ERR=/tmp/tmp.YT8xXLJIT0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OxWiGkuo0e ++ cat /tmp/tmp.YT8xXLJIT0 ++ rm /tmp/tmp.OxWiGkuo0e /tmp/tmp.YT8xXLJIT0 ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.H9bflXx7TU +++ mktemp ++ local LAST_ERR=/tmp/tmp.x5IUFFW44h ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.H9bflXx7TU ++ cat /tmp/tmp.x5IUFFW44h ++ rm /tmp/tmp.H9bflXx7TU /tmp/tmp.x5IUFFW44h ++ return 0 + client_pod=pxc-client-6644d8898f-4zb6x + wait_pod pxc-client-6644d8898f-4zb6x + local pod=pxc-client-6644d8898f-4zb6x + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4zb6x ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4zb6x condition met pxc-client-6644d8898f-4zb6x.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.3wek4g2sOh/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.3wek4g2sOh/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.tJDYrWv4Kn ++ mktemp + local LAST_ERR=/tmp/tmp.OpVHQDxgqB + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.tJDYrWv4Kn perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.OpVHQDxgqB + rm /tmp/tmp.tJDYrWv4Kn /tmp/tmp.OpVHQDxgqB + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QWEjd3px0Q +++ mktemp ++ local LAST_ERR=/tmp/tmp.APOWLOZzgq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QWEjd3px0Q ++ cat /tmp/tmp.APOWLOZzgq ++ rm /tmp/tmp.QWEjd3px0Q /tmp/tmp.APOWLOZzgq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kM5AGCITZU +++ mktemp ++ local LAST_ERR=/tmp/tmp.I8nsxuRkMR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kM5AGCITZU ++ cat /tmp/tmp.I8nsxuRkMR ++ rm /tmp/tmp.kM5AGCITZU /tmp/tmp.I8nsxuRkMR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2eWOwpRRN4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.tQEKlZvRsw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2eWOwpRRN4 ++ cat /tmp/tmp.tQEKlZvRsw ++ rm /tmp/tmp.2eWOwpRRN4 /tmp/tmp.tQEKlZvRsw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.da5DwkK58F +++ mktemp ++ local LAST_ERR=/tmp/tmp.mNk7ikCD8q ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ 
break ++ cat /tmp/tmp.da5DwkK58F ++ cat /tmp/tmp.mNk7ikCD8q ++ rm /tmp/tmp.da5DwkK58F /tmp/tmp.mNk7ikCD8q ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IpApKH4Hwh +++ mktemp ++ local LAST_ERR=/tmp/tmp.OVeuJcAaMl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IpApKH4Hwh ++ cat /tmp/tmp.OVeuJcAaMl ++ rm /tmp/tmp.IpApKH4Hwh /tmp/tmp.OVeuJcAaMl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OeFd0Ck9WE +++ mktemp ++ local LAST_ERR=/tmp/tmp.9hWTkzPmwW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OeFd0Ck9WE ++ cat /tmp/tmp.9hWTkzPmwW ++ rm /tmp/tmp.OeFd0Ck9WE /tmp/tmp.9hWTkzPmwW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qljqbpoCyF +++ mktemp ++ local LAST_ERR=/tmp/tmp.VLSGNwqe0u ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qljqbpoCyF ++ cat /tmp/tmp.VLSGNwqe0u ++ rm /tmp/tmp.qljqbpoCyF /tmp/tmp.VLSGNwqe0u ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9PGOu1jn6a +++ mktemp ++ local LAST_ERR=/tmp/tmp.F9rS16V8YA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9PGOu1jn6a ++ cat /tmp/tmp.F9rS16V8YA ++ rm /tmp/tmp.9PGOu1jn6a /tmp/tmp.F9rS16V8YA ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.UHEnrE1m3v ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.tsBlvvgHvx +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.UHEnrE1m3v +++++ cat /tmp/tmp.tsBlvvgHvx +++++ rm /tmp/tmp.UHEnrE1m3v /tmp/tmp.tsBlvvgHvx +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.2oWDN48zlB ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.fLkSAtNkyt +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.2oWDN48zlB +++++ 
cat /tmp/tmp.fLkSAtNkyt +++++ rm /tmp/tmp.2oWDN48zlB /tmp/tmp.fLkSAtNkyt +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.y0Or5DXJNO +++ mktemp ++ local LAST_ERR=/tmp/tmp.9Vbx5iGpbm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.y0Or5DXJNO ++ cat /tmp/tmp.9Vbx5iGpbm ++ rm /tmp/tmp.y0Or5DXJNO /tmp/tmp.9Vbx5iGpbm ++ return 0 + [[ 2 == \2 ]] + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.sHKLRWX1FC ++ mktemp + local LAST_ERR=/tmp/tmp.Mz29nhOrkl + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.sHKLRWX1FC secret/my-cluster-secrets-2 patched + cat /tmp/tmp.Mz29nhOrkl + rm /tmp/tmp.sHKLRWX1FC /tmp/tmp.Mz29nhOrkl + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8sLSkq16QP +++ mktemp ++ local LAST_ERR=/tmp/tmp.2exTVioZPI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8sLSkq16QP ++ cat /tmp/tmp.2exTVioZPI ++ rm /tmp/tmp.8sLSkq16QP /tmp/tmp.2exTVioZPI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NwKpd1eSEP +++ mktemp ++ local LAST_ERR=/tmp/tmp.rtUEiFW0XD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NwKpd1eSEP ++ cat /tmp/tmp.rtUEiFW0XD ++ rm /tmp/tmp.NwKpd1eSEP /tmp/tmp.rtUEiFW0XD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bmKQlWuDa0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.sdFbokE0IC ++ local 
exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bmKQlWuDa0 ++ cat /tmp/tmp.sdFbokE0IC ++ rm /tmp/tmp.bmKQlWuDa0 /tmp/tmp.sdFbokE0IC ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WbYjgEUvFt +++ mktemp ++ local LAST_ERR=/tmp/tmp.SOFn15DNzt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WbYjgEUvFt ++ cat /tmp/tmp.SOFn15DNzt ++ rm /tmp/tmp.WbYjgEUvFt /tmp/tmp.SOFn15DNzt ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.wb8IGZIb5f ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.OFCRupLIXT +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.wb8IGZIb5f +++++ cat /tmp/tmp.OFCRupLIXT +++++ rm /tmp/tmp.wb8IGZIb5f /tmp/tmp.OFCRupLIXT +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.CqWzqMHyc3 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.l0JYb581vV +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.CqWzqMHyc3 +++++ cat /tmp/tmp.l0JYb581vV +++++ rm /tmp/tmp.CqWzqMHyc3 /tmp/tmp.l0JYb581vV +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BX5aBZc2SM +++ mktemp ++ local LAST_ERR=/tmp/tmp.u1pa9isZUn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BX5aBZc2SM ++ cat /tmp/tmp.u1pa9isZUn ++ rm /tmp/tmp.BX5aBZc2SM /tmp/tmp.u1pa9isZUn ++ return 0 + [[ 2 == \2 ]] + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 
'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JBmIAJDdih +++ mktemp ++ local LAST_ERR=/tmp/tmp.T3c4VB1cJS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JBmIAJDdih ++ cat /tmp/tmp.T3c4VB1cJS ++ rm /tmp/tmp.JBmIAJDdih /tmp/tmp.T3c4VB1cJS ++ return 0 + client_pod=pxc-client-6644d8898f-4zb6x + wait_pod pxc-client-6644d8898f-4zb6x + local pod=pxc-client-6644d8898f-4zb6x + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo pxc-client-6644d8898f-4zb6x ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4zb6x condition met pxc-client-6644d8898f-4zb6x.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.3wek4g2sOh/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.3wek4g2sOh/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.RUNjLA6LQQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.5nnBSgIDc6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RUNjLA6LQQ ++ cat /tmp/tmp.5nnBSgIDc6 ++ rm /tmp/tmp.RUNjLA6LQQ /tmp/tmp.5nnBSgIDc6 ++ return 0 + newpass='4P#7>H>hGUqDTfhPq' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''4P#7>H>hGUqDTfhPq'\'';' '-h some-name-pxc -uroot -p'\''4P#7>H>hGUqDTfhPq'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''4P#7>H>hGUqDTfhPq'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''4P#7>H>hGUqDTfhPq'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fWJ6K6tJRa +++ mktemp ++ local LAST_ERR=/tmp/tmp.RO76Fyx3iT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fWJ6K6tJRa ++ cat /tmp/tmp.RO76Fyx3iT ++ rm /tmp/tmp.fWJ6K6tJRa /tmp/tmp.RO76Fyx3iT ++ return 0 + client_pod=pxc-client-6644d8898f-4zb6x + wait_pod pxc-client-6644d8898f-4zb6x + local pod=pxc-client-6644d8898f-4zb6x + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4zb6x ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4zb6x condition met pxc-client-6644d8898f-4zb6x.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''4P#7>H>hGUqDTfhPq'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''4P#7>H>hGUqDTfhPq'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4.sql + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''4P#7>H>hGUqDTfhPq'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''4P#7>H>hGUqDTfhPq'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aqnHJJ87b7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.FQl3VLA8ff ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aqnHJJ87b7 ++ cat /tmp/tmp.FQl3VLA8ff ++ rm /tmp/tmp.aqnHJJ87b7 /tmp/tmp.FQl3VLA8ff ++ return 0 + client_pod=pxc-client-6644d8898f-4zb6x + wait_pod pxc-client-6644d8898f-4zb6x + local pod=pxc-client-6644d8898f-4zb6x + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4zb6x ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4zb6x condition met pxc-client-6644d8898f-4zb6x.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.3wek4g2sOh/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.3wek4g2sOh/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.eotrIekrja +++ mktemp ++ local LAST_ERR=/tmp/tmp.CiPFhOnJq7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eotrIekrja ++ cat /tmp/tmp.CiPFhOnJq7 ++ rm /tmp/tmp.eotrIekrja /tmp/tmp.CiPFhOnJq7 ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.RrsUY9KwNu ++ mktemp + local LAST_ERR=/tmp/tmp.W86SL1tYtg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RrsUY9KwNu secret/my-cluster-secrets-2 configured + cat /tmp/tmp.W86SL1tYtg Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
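# The apply warning above is expected: my-cluster-secrets-2 previously lacked the
# kubectl.kubernetes.io/last-applied-configuration annotation (it was not created with
# `kubectl apply` or `kubectl create --save-config`), so the first declarative apply
# patches the annotation in automatically and the secret is still updated.
# The data values in these secrets are plain base64, which is why the test pipes new
# passwords through base64 before patching and decodes them when reading, e.g.:
#
#   echo -n 'test-password2' | base64               # dGVzdC1wYXNzd29yZDI=  (value patched into the secret)
#   echo 'dGVzdC1wYXNzd29yZDI=' | base64 --decode   # test-password2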
+ rm /tmp/tmp.RrsUY9KwNu /tmp/tmp.W86SL1tYtg + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.69EnUJTkdS +++ mktemp ++ local LAST_ERR=/tmp/tmp.5yhNrY6faA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.69EnUJTkdS ++ cat /tmp/tmp.5yhNrY6faA ++ rm /tmp/tmp.69EnUJTkdS /tmp/tmp.5yhNrY6faA ++ return 0 + client_pod=pxc-client-6644d8898f-4zb6x + wait_pod pxc-client-6644d8898f-4zb6x + local pod=pxc-client-6644d8898f-4zb6x + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' ++ echo pxc-client-6644d8898f-4zb6x + local container= + set +o xtrace pod/pxc-client-6644d8898f-4zb6x condition met pxc-client-6644d8898f-4zb6x.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.3wek4g2sOh/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.3wek4g2sOh/select-4.sql + newpass=test-password2 ++ base64 ++ echo -n test-password2 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/conf/some-name.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-13843~ + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1752-44f0e1a8#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' ++ mktemp + local LAST_OUT=/tmp/tmp.gcTTiTxBR8 ++ mktemp + local LAST_ERR=/tmp/tmp.dtIQZR8sPy + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.gcTTiTxBR8 perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.dtIQZR8sPy + rm /tmp/tmp.gcTTiTxBR8 /tmp/tmp.dtIQZR8sPy + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.a5QuvNJUbn +++ mktemp ++ local LAST_ERR=/tmp/tmp.BHK6b7hJT7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.a5QuvNJUbn ++ cat /tmp/tmp.BHK6b7hJT7 ++ rm /tmp/tmp.a5QuvNJUbn /tmp/tmp.BHK6b7hJT7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tGFKwfuEJR +++ mktemp ++ local LAST_ERR=/tmp/tmp.KkgUXdsSBQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tGFKwfuEJR ++ cat /tmp/tmp.KkgUXdsSBQ ++ rm /tmp/tmp.tGFKwfuEJR /tmp/tmp.KkgUXdsSBQ ++ return 0 + [[ 
initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uufSAAr10G +++ mktemp ++ local LAST_ERR=/tmp/tmp.t2xmBIhfKA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uufSAAr10G ++ cat /tmp/tmp.t2xmBIhfKA ++ rm /tmp/tmp.uufSAAr10G /tmp/tmp.t2xmBIhfKA ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OyA2yeNCk8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ul3ej3n1a5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OyA2yeNCk8 ++ cat /tmp/tmp.ul3ej3n1a5 ++ rm /tmp/tmp.OyA2yeNCk8 /tmp/tmp.ul3ej3n1a5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ux3OBgMevV +++ mktemp ++ local LAST_ERR=/tmp/tmp.OTa83ngafC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ux3OBgMevV ++ cat /tmp/tmp.OTa83ngafC ++ rm /tmp/tmp.Ux3OBgMevV /tmp/tmp.OTa83ngafC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oHMGqDY1Tg +++ mktemp ++ local LAST_ERR=/tmp/tmp.nCgQVv2Hlf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oHMGqDY1Tg ++ cat /tmp/tmp.nCgQVv2Hlf ++ rm /tmp/tmp.oHMGqDY1Tg /tmp/tmp.nCgQVv2Hlf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TNSgTwOwwN +++ mktemp ++ local LAST_ERR=/tmp/tmp.a5rjGs6eYU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TNSgTwOwwN ++ cat /tmp/tmp.a5rjGs6eYU ++ rm /tmp/tmp.TNSgTwOwwN /tmp/tmp.a5rjGs6eYU ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0uK6m6VbAq +++ mktemp ++ local LAST_ERR=/tmp/tmp.dUhCjS7yp0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0uK6m6VbAq ++ cat /tmp/tmp.dUhCjS7yp0 ++ rm /tmp/tmp.0uK6m6VbAq /tmp/tmp.dUhCjS7yp0 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local 
cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.owsOth00Zi ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.cV9Et1hoL1 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.owsOth00Zi +++++ cat /tmp/tmp.cV9Et1hoL1 +++++ rm /tmp/tmp.owsOth00Zi /tmp/tmp.cV9Et1hoL1 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Smz9juA8Lp +++ mktemp ++ local LAST_ERR=/tmp/tmp.AeMAXj94Y1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Smz9juA8Lp ++ cat /tmp/tmp.AeMAXj94Y1 ++ rm /tmp/tmp.Smz9juA8Lp /tmp/tmp.AeMAXj94Y1 ++ return 0 + [[ 2 == \3 ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lMZA2j8Cmt +++ mktemp ++ local LAST_ERR=/tmp/tmp.92aRs69oRO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lMZA2j8Cmt ++ cat /tmp/tmp.92aRs69oRO ++ rm /tmp/tmp.lMZA2j8Cmt /tmp/tmp.92aRs69oRO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.O6NlPU8jCi +++ mktemp ++ local LAST_ERR=/tmp/tmp.cu4uoKki71 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.O6NlPU8jCi ++ cat /tmp/tmp.cu4uoKki71 ++ rm /tmp/tmp.O6NlPU8jCi /tmp/tmp.cu4uoKki71 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 8 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xBYMMXaJZ9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Eou7vrwZ6J ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xBYMMXaJZ9 ++ cat /tmp/tmp.Eou7vrwZ6J ++ rm /tmp/tmp.xBYMMXaJZ9 /tmp/tmp.Eou7vrwZ6J ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 9 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TSYfAQJKso +++ mktemp ++ local LAST_ERR=/tmp/tmp.KeBPaBUYli ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TSYfAQJKso ++ cat /tmp/tmp.KeBPaBUYli ++ rm 
/tmp/tmp.TSYfAQJKso /tmp/tmp.KeBPaBUYli ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 10 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.u3f3KnkTxG +++ mktemp ++ local LAST_ERR=/tmp/tmp.eslGn6XaTP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.u3f3KnkTxG ++ cat /tmp/tmp.eslGn6XaTP ++ rm /tmp/tmp.u3f3KnkTxG /tmp/tmp.eslGn6XaTP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 11 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7pZddlRl4L +++ mktemp ++ local LAST_ERR=/tmp/tmp.o5zuVgDNeY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7pZddlRl4L ++ cat /tmp/tmp.o5zuVgDNeY ++ rm /tmp/tmp.7pZddlRl4L /tmp/tmp.o5zuVgDNeY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 12 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WwASrFDHia +++ mktemp ++ local LAST_ERR=/tmp/tmp.gtFLnGC4fB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WwASrFDHia ++ cat /tmp/tmp.gtFLnGC4fB ++ rm /tmp/tmp.WwASrFDHia /tmp/tmp.gtFLnGC4fB ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Qzow7blaGL +++ mktemp ++ local LAST_ERR=/tmp/tmp.MrVBpMxp63 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Qzow7blaGL ++ cat /tmp/tmp.MrVBpMxp63 ++ rm /tmp/tmp.Qzow7blaGL /tmp/tmp.MrVBpMxp63 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.0kaeDmQO6p ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.51ZQ0rHasb +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.0kaeDmQO6p +++++ cat /tmp/tmp.51ZQ0rHasb +++++ rm /tmp/tmp.0kaeDmQO6p /tmp/tmp.51ZQ0rHasb +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VMtb81NcJo +++ mktemp ++ local LAST_ERR=/tmp/tmp.7y9j6RB0BB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VMtb81NcJo ++ cat 
/tmp/tmp.7y9j6RB0BB ++ rm /tmp/tmp.VMtb81NcJo /tmp/tmp.7y9j6RB0BB ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ayB7c8om0k ++ mktemp + local LAST_ERR=/tmp/tmp.vncGzQ5YTd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ayB7c8om0k secret/my-cluster-secrets patched + cat /tmp/tmp.vncGzQ5YTd + rm /tmp/tmp.ayB7c8om0k /tmp/tmp.vncGzQ5YTd + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BWXJCbYD3p +++ mktemp ++ local LAST_ERR=/tmp/tmp.1K1Tr4u6Ik ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BWXJCbYD3p ++ cat /tmp/tmp.1K1Tr4u6Ik ++ rm /tmp/tmp.BWXJCbYD3p /tmp/tmp.1K1Tr4u6Ik ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.is0OdFyAYw +++ mktemp ++ local LAST_ERR=/tmp/tmp.KFOQcj3WLW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.is0OdFyAYw ++ cat /tmp/tmp.KFOQcj3WLW ++ rm /tmp/tmp.is0OdFyAYw /tmp/tmp.KFOQcj3WLW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nLHO4Q30nd +++ mktemp ++ local LAST_ERR=/tmp/tmp.utDL3cPk7G ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nLHO4Q30nd ++ cat /tmp/tmp.utDL3cPk7G ++ rm /tmp/tmp.nLHO4Q30nd /tmp/tmp.utDL3cPk7G ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XHY5IrIG9J +++ mktemp ++ local LAST_ERR=/tmp/tmp.F1ZBWNaeJc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XHY5IrIG9J ++ cat /tmp/tmp.F1ZBWNaeJc ++ rm /tmp/tmp.XHY5IrIG9J /tmp/tmp.F1ZBWNaeJc ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 
'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ygqaZzNQ9A +++ mktemp ++ local LAST_ERR=/tmp/tmp.H1PNBIfJw3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ygqaZzNQ9A ++ cat /tmp/tmp.H1PNBIfJw3 ++ rm /tmp/tmp.ygqaZzNQ9A /tmp/tmp.H1PNBIfJw3 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.UaULvfLCdA ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.I03RFrwkuS +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.UaULvfLCdA +++++ cat /tmp/tmp.I03RFrwkuS +++++ rm /tmp/tmp.UaULvfLCdA /tmp/tmp.I03RFrwkuS +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9SL9gbXOS6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.npSWfVqHwx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9SL9gbXOS6 ++ cat /tmp/tmp.npSWfVqHwx ++ rm /tmp/tmp.9SL9gbXOS6 /tmp/tmp.npSWfVqHwx ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-3-80.sql ']' + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LxmW8c16Yx +++ mktemp ++ local LAST_ERR=/tmp/tmp.XaH88zjmz2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LxmW8c16Yx ++ cat /tmp/tmp.XaH88zjmz2 ++ rm /tmp/tmp.LxmW8c16Yx /tmp/tmp.XaH88zjmz2 ++ return 0 + client_pod=pxc-client-6644d8898f-4zb6x + wait_pod pxc-client-6644d8898f-4zb6x + local pod=pxc-client-6644d8898f-4zb6x + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-4zb6x ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-4zb6x condition met pxc-client-6644d8898f-4zb6x.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.3wek4g2sOh/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1752/e2e-tests/users/compare/select-3.sql /tmp/tmp.3wek4g2sOh/select-3.sql + destroy users-13843 + local namespace=users-13843 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' + grep -v 'get backup status: Job.batch' + sort -u + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + tee /tmp/tmp.3wek4g2sOh/operator.log ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.kHwQsX1kaz +++ mktemp ++ local LAST_ERR=/tmp/tmp.RwCYirDulf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kHwQsX1kaz ++ cat /tmp/tmp.RwCYirDulf ++ rm /tmp/tmp.kHwQsX1kaz /tmp/tmp.RwCYirDulf ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-6dbfd9bb9d-cs2sq ++ mktemp + local LAST_OUT=/tmp/tmp.QP3gz96Xtu ++ mktemp + local LAST_ERR=/tmp/tmp.0mvbsERkJr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-6dbfd9bb9d-cs2sq + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QP3gz96Xtu + cat /tmp/tmp.0mvbsERkJr + rm /tmp/tmp.QP3gz96Xtu /tmp/tmp.0mvbsERkJr + return 0 2024-07-08T09:43:45.936Z INFO setup Manager starting up {"gitCommit": "44f0e1a80c0c8b8f0ea06b22799841ae06d9bf8c", "gitBranch": "PR-1752-44f0e1a8", "buildTime": "2024-07-08T07:48:21Z", "goVersion": "go1.22.5", "os": "linux", "arch": "amd64"} 2024-07-08T09:43:45.936Z INFO setup Registering Components. 2024-07-08T09:43:45.936Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1469001"} 2024-07-08T09:43:52.324Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2024-07-08T09:43:52.328Z INFO setup Starting the Cmd. 2024-07-08T09:43:52.329Z INFO controller-runtime.certwatcher Updated current TLS certificate 2024-07-08T09:43:52.329Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2024-07-08T09:43:52.329Z INFO controller-runtime.metrics Starting metrics server 2024-07-08T09:43:52.329Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2024-07-08T09:43:52.329Z INFO controller-runtime.webhook Starting webhook server 2024-07-08T09:43:52.329Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2024-07-08T09:43:52.330Z INFO controller-runtime.certwatcher Starting certificate watcher 2024-07-08T09:43:52.430Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 
2024-07-08T09:43:52.447Z DEBUG events percona-xtradb-cluster-operator-6dbfd9bb9d-cs2sq_dbc2cc93-c5d9-4f49-a22b-e358d6341b48 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"e579a789-4b6f-4700-9b3f-49971095592a","apiVersion":"coordination.k8s.io/v1","resourceVersion":"61417"}, "reason": "LeaderElection"} 2024-07-08T09:43:52.447Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2024-07-08T09:43:52.447Z INFO Starting Controller {"controller": "pxc-controller"} 2024-07-08T09:43:52.447Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2024-07-08T09:43:52.447Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2024-07-08T09:43:52.447Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2024-07-08T09:43:52.447Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2024-07-08T09:43:52.448Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2024-07-08T09:43:52.656Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2024-07-08T09:43:52.656Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2024-07-08T09:43:52.656Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2024-07-08T09:44:19.733Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "7789c9c2-31fb-43e8-8b0d-b349548c3606", "version": "1.15.0"} 2024-07-08T09:45:40.079Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "1500bc29-30ca-4b62-81dc-f41d0ad6b4b2", "user": "operator"} 2024-07-08T09:45:40.119Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "1500bc29-30ca-4b62-81dc-f41d0ad6b4b2", "user": "monitor"} 2024-07-08T09:45:40.207Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "1500bc29-30ca-4b62-81dc-f41d0ad6b4b2"} 2024-07-08T09:45:40.248Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "1500bc29-30ca-4b62-81dc-f41d0ad6b4b2"} 2024-07-08T09:45:40.292Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "1500bc29-30ca-4b62-81dc-f41d0ad6b4b2", "user": "xtrabackup"} 2024-07-08T09:45:40.350Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "1500bc29-30ca-4b62-81dc-f41d0ad6b4b2"} 2024-07-08T09:45:40.384Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "1500bc29-30ca-4b62-81dc-f41d0ad6b4b2", "user": "replication"} 2024-07-08T09:45:40.475Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "1500bc29-30ca-4b62-81dc-f41d0ad6b4b2", "err": "get primary pxc pod: not found"} 2024-07-08T09:45:45.157Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", 
"reconcileID": "96d30998-9592-465a-bcf5-48ec79c66eff", "err": "get primary pxc pod: not found"} 2024-07-08T09:45:50.375Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "a5ba6e4c-eb48-45ae-8f1c-a151ede181d4", "err": "get primary pxc pod: not found"} 2024-07-08T09:45:55.556Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "7a7f3a45-e0c1-474c-b617-c281df26d47f", "err": "get primary pxc pod: not found"} 2024-07-08T09:48:03.967Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f2d97815-1494-4783-b115-1c5d2bbca043", "user": "root"} 2024-07-08T09:48:04.263Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f2d97815-1494-4783-b115-1c5d2bbca043", "new version": "8.0.36-28.1"} 2024-07-08T09:48:07.275Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f2d97815-1494-4783-b115-1c5d2bbca043"} 2024-07-08T09:48:12.157Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "26417868-1338-4a50-aabd-ec3738a26ea2"} 2024-07-08T09:48:17.545Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "8a4268b5-6653-4ed8-b217-143846d3a3dc"} 2024-07-08T09:48:23.236Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "8edd4f91-c29e-4c2b-9dbc-fb469cd63b00"} 2024-07-08T09:48:28.434Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "780e070c-5feb-4929-8d9c-0c66a1c9071b"} 2024-07-08T09:48:33.848Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "8706639d-2c1c-4e0b-ab5c-84a2169126cc"} 2024-07-08T09:48:39.067Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "1c44240b-07e5-4f23-a993-f6970097a3ac"} 2024-07-08T09:48:44.675Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "6c584fc9-c4a6-4fd1-a660-25e225fd700b"} 2024-07-08T09:48:50.257Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "34a2ca24-2330-4f89-af92-5ad11381b8bc"} 2024-07-08T09:48:55.144Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "90d21a13-c80c-458c-926d-d66eeaca6618"} 2024-07-08T09:49:00.636Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "0fbf57de-3295-4056-b3ce-949312e3ee1a"} 2024-07-08T09:49:06.570Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "b2f98f62-5ccf-4f2e-b3c0-1e91babd33e0"} 2024-07-08T09:49:12.033Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", 
"reconcileID": "891e8799-3f99-4451-a343-285cfc3c5562"} 2024-07-08T09:49:14.061Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "861d282b-f9b0-40bd-817e-a6946dc5d911", "user": "root"} 2024-07-08T09:49:14.109Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "861d282b-f9b0-40bd-817e-a6946dc5d911", "user": "root"} 2024-07-08T09:49:14.123Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "861d282b-f9b0-40bd-817e-a6946dc5d911", "secret": "some-name-mysql-init", "user": "root"} 2024-07-08T09:49:19.630Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "861d282b-f9b0-40bd-817e-a6946dc5d911"} 2024-07-08T09:49:19.648Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "861d282b-f9b0-40bd-817e-a6946dc5d911", "user": "root"} 2024-07-08T09:49:19.699Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "861d282b-f9b0-40bd-817e-a6946dc5d911", "user": "root"} 2024-07-08T09:49:23.439Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "861d282b-f9b0-40bd-817e-a6946dc5d911"} 2024-07-08T09:49:28.426Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "aecb1214-feed-487e-a196-c0eccfd5d3db"} 2024-07-08T09:49:33.835Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "30b06770-de40-4d80-b32d-3f49a74c771f"} 2024-07-08T09:49:55.922Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "64a049ec-0be8-469c-b45f-3fe35b500de1", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T09:50:00.990Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "584bfdb9-e26b-4a77-8bbb-0235a58fc53b", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in 
ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T09:50:02.382Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "e34284d5-e1d8-4db9-a564-604f44da417e", "user": "proxyadmin"} 2024-07-08T09:50:02.382Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "e34284d5-e1d8-4db9-a564-604f44da417e", "user": "proxyadmin"} 2024-07-08T09:50:02.464Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "e34284d5-e1d8-4db9-a564-604f44da417e", "user": "proxyadmin"} 2024-07-08T09:50:02.474Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "e34284d5-e1d8-4db9-a564-604f44da417e", "user": "proxyadmin"} 2024-07-08T09:50:02.474Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "e34284d5-e1d8-4db9-a564-604f44da417e", "last-applied-secret": "1a55867d0f4fa0d0643653b2eb57b2f690315d8d843a10599c9839c2a779a95f"} 2024-07-08T09:50:02.764Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "e34284d5-e1d8-4db9-a564-604f44da417e", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T09:50:46.323Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "b58d1443-bbdb-46f5-b3d7-9d72918c4197"} 2024-07-08T09:50:53.461Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "9daefc28-0690-476c-ac73-dd1da99a98db"} 2024-07-08T09:50:56.070Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "8421f596-f0cc-4ab0-8189-e65b97e795d7", "user": "xtrabackup"} 2024-07-08T09:50:56.096Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "8421f596-f0cc-4ab0-8189-e65b97e795d7", "user": "xtrabackup"} 2024-07-08T09:50:56.105Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "8421f596-f0cc-4ab0-8189-e65b97e795d7", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-07-08T09:50:56.116Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "8421f596-f0cc-4ab0-8189-e65b97e795d7", "user": "xtrabackup"} 2024-07-08T09:50:56.140Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "8421f596-f0cc-4ab0-8189-e65b97e795d7", "user": "xtrabackup"} 2024-07-08T09:50:56.148Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "8421f596-f0cc-4ab0-8189-e65b97e795d7", "last-applied-secret": "d11bcc70ca53ea410cd455e13d82e7225917b0861d9b677ed34c02c5f6fbaf1b"} 2024-07-08T09:51:01.363Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "8421f596-f0cc-4ab0-8189-e65b97e795d7"} 2024-07-08T09:51:54.941Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "d1489e5d-3e84-47e5-96f0-eeb1e2733bcb", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-13843 on 10.180.128.10:53: no such host"} 2024-07-08T09:52:00.221Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "8855389b-7523-4296-a459-db6d4da90d77", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.168.200.39:33062: connect: connection refused"} 2024-07-08T09:52:36.839Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "ef8598ea-2a0c-46df-ba7a-a0f0b6f05a60", "err": "failed to 
connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-13843 on 10.180.128.10:53: no such host"} 2024-07-08T09:52:37.091Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f9e82cb1-1476-4e2d-ae6d-74fe1bc6d4b7", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-13843 on 10.180.128.10:53: no such host"} 2024-07-08T09:52:42.144Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "d4ea69a0-df0f-459e-b45f-7cfd34118bec", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-13843 on 10.180.128.10:53: no such host"} 2024-07-08T09:52:48.928Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "9c5f37cd-dec4-4c5f-80b8-891613adf04d", "primary name": "some-name-pxc-0.some-name-pxc.users-13843.svc.cluster.local"} 2024-07-08T09:52:49.167Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "357e71d4-09a1-4f15-8cee-71c6e5330981", "primary name": "some-name-pxc-0.some-name-pxc.users-13843.svc.cluster.local"} 2024-07-08T09:52:54.181Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "da74369b-3ffc-46f5-a798-47906e3c0ad2", "primary name": "some-name-pxc-0.some-name-pxc.users-13843.svc.cluster.local"} 2024-07-08T09:52:59.429Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "287220e0-10b0-4ffc-8692-a9f7159db20d", "primary name": "some-name-pxc-0.some-name-pxc.users-13843.svc.cluster.local"} 2024-07-08T09:53:04.618Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "d8643321-4cfb-4991-a770-caa71aaecf50", "primary name": "some-name-pxc-0.some-name-pxc.users-13843.svc.cluster.local"} 2024-07-08T09:53:09.807Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "a8991c36-5b06-4ef9-a3bf-22e6b33d9bac", "primary name": "some-name-pxc-0.some-name-pxc.users-13843.svc.cluster.local"} 2024-07-08T09:53:14.998Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "8637002a-fb8c-4ce8-8da2-59bd15563342", "primary name": "some-name-pxc-0.some-name-pxc.users-13843.svc.cluster.local"} 2024-07-08T09:53:20.238Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "79b3f96f-732f-4688-801f-68e434079500", "primary name": "some-name-pxc-0.some-name-pxc.users-13843.svc.cluster.local"} 2024-07-08T09:53:25.448Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "025a8919-77e5-41bb-b0ee-15bdafa02c35", "primary name": "some-name-pxc-0.some-name-pxc.users-13843.svc.cluster.local"} 2024-07-08T09:53:34.879Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "b0167a80-b9c1-4750-8455-6dec02ae4010"} 2024-07-08T09:53:39.174Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "a26c433f-823c-4273-92eb-1e82c3eead3c"} 2024-07-08T09:53:44.625Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "96e1e23d-747a-4612-990b-22c55ce8c2ec"} 2024-07-08T09:53:50.132Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "978aee03-fd33-439a-b354-1334c020a153"} 2024-07-08T09:53:52.012Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "46fae9d0-942b-48d1-becf-e7f8580f087d", "user": "monitor"} 2024-07-08T09:53:52.043Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "46fae9d0-942b-48d1-becf-e7f8580f087d", "user": "monitor"} 2024-07-08T09:53:52.052Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "46fae9d0-942b-48d1-becf-e7f8580f087d", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-08T09:53:52.093Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "46fae9d0-942b-48d1-becf-e7f8580f087d", "user": "monitor"} 2024-07-08T09:53:52.104Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "46fae9d0-942b-48d1-becf-e7f8580f087d", "user": "monitor"} 2024-07-08T09:53:52.224Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "46fae9d0-942b-48d1-becf-e7f8580f087d", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-07-08T09:53:54.817Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "46fae9d0-942b-48d1-becf-e7f8580f087d", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T09:54:17.541Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "b90a6fdf-2a5a-460c-811e-d92fd123f005", "user": "monitor"} 
2024-07-08T09:54:21.563Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "b90a6fdf-2a5a-460c-811e-d92fd123f005"} 2024-07-08T09:54:21.872Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "6b190d95-01fe-46df-9e1d-71d03f318b24", "user": "monitor"} 2024-07-08T09:54:25.490Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "6b190d95-01fe-46df-9e1d-71d03f318b24"} 2024-07-08T09:54:27.335Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "5364076a-d2c3-4ea9-a999-18785ae80f6d", "user": "monitor"} 2024-07-08T09:54:31.166Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "5364076a-d2c3-4ea9-a999-18785ae80f6d"} 2024-07-08T09:54:33.382Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "99fa3194-d084-4f05-863e-e4ad9fb00737", "user": "monitor"} 2024-07-08T09:54:33.957Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "99fa3194-d084-4f05-863e-e4ad9fb00737", "user": "monitor"} 2024-07-08T09:54:33.980Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "99fa3194-d084-4f05-863e-e4ad9fb00737", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-07-08T09:54:37.543Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "99fa3194-d084-4f05-863e-e4ad9fb00737"} 2024-07-08T09:54:42.728Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f8db780c-0fc1-43a9-9351-a6ffb1c308b3"} 2024-07-08T09:54:48.206Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "5985fa41-757b-4131-baf1-27d55d6ec0ee"} 2024-07-08T09:54:53.864Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f8db6f74-dfdc-47ab-a98e-a6b7ca241a77"} 2024-07-08T09:54:58.856Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "2a861d50-becb-478c-8c41-81a8568dceb7"} 2024-07-08T09:55:04.330Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "d70350ab-55bf-4e93-83cf-562835c20207"} 2024-07-08T09:55:06.106Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "d9908f2b-2be3-4754-8bbe-a408c72240c8", "user": "operator"} 2024-07-08T09:55:06.136Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "d9908f2b-2be3-4754-8bbe-a408c72240c8", "user": "operator"} 2024-07-08T09:55:06.145Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": 
"d9908f2b-2be3-4754-8bbe-a408c72240c8", "secret": "some-name-mysql-init", "user": "operator"} 2024-07-08T09:55:06.157Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "d9908f2b-2be3-4754-8bbe-a408c72240c8", "user": "operator"} 2024-07-08T09:55:06.187Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "d9908f2b-2be3-4754-8bbe-a408c72240c8", "user": "operator"} 2024-07-08T09:55:06.217Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "d9908f2b-2be3-4754-8bbe-a408c72240c8", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-07-08T09:55:07.469Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "d9908f2b-2be3-4754-8bbe-a408c72240c8", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13843.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13843.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13843.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13843.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13843.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13843.svc.' 
(using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T09:55:33.901Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "c68e5133-41bf-4a3c-8552-3fae34254c53"} 2024-07-08T09:55:39.957Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "b3264c1b-3a1e-497b-b315-69e4d00513b1"} 2024-07-08T09:55:44.952Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "6408e9f0-bf6d-43a4-b795-3505ac367d39"} 2024-07-08T09:55:50.278Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "d1962aae-8bc5-4a26-8a12-b6bb90784b2f"} 2024-07-08T09:55:55.654Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "588c810f-b3f0-41f2-84bc-669b584baa93"} 2024-07-08T09:56:01.049Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "9acc8fd2-6457-4363-9c15-605e26afb39f"} 2024-07-08T09:56:04.281Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "secrets": "my-cluster-secrets-2"} 2024-07-08T09:56:04.290Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "user": "root"} 2024-07-08T09:56:04.333Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "user": "root"} 2024-07-08T09:56:04.345Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "secret": "some-name-mysql-init", "user": "root"} 2024-07-08T09:56:13.047Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "0abaae90-7656-4a3b-aa57-78515d23a62e"} 2024-07-08T09:56:13.477Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e"} 2024-07-08T09:56:13.492Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "user": "root"} 2024-07-08T09:56:13.539Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "user": "root"} 2024-07-08T09:56:13.550Z INFO Password changed, updating user 
{"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "user": "operator"} 2024-07-08T09:56:13.577Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "user": "operator"} 2024-07-08T09:56:13.588Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "secret": "some-name-mysql-init", "user": "operator"} 2024-07-08T09:56:13.597Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "user": "operator"} 2024-07-08T09:56:13.625Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "user": "operator"} 2024-07-08T09:56:13.637Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "user": "monitor"} 2024-07-08T09:56:13.666Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "user": "monitor"} 2024-07-08T09:56:13.674Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-08T09:56:13.714Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "user": "monitor"} 2024-07-08T09:56:13.727Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "user": "monitor"} 2024-07-08T09:56:13.875Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "user": "xtrabackup"} 2024-07-08T09:56:13.903Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "user": "xtrabackup"} 2024-07-08T09:56:13.914Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-07-08T09:56:13.923Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "user": "xtrabackup"} 2024-07-08T09:56:13.950Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "user": "xtrabackup"} 2024-07-08T09:56:13.960Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "user": "replication"} 2024-07-08T09:56:13.988Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-13843", "name": 
"some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "user": "replication"} 2024-07-08T09:56:13.997Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "secret": "some-name-mysql-init", "user": "replication"} 2024-07-08T09:56:14.013Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "user": "replication"} 2024-07-08T09:56:14.043Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "user": "replication"} 2024-07-08T09:56:14.043Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "user": "proxyadmin"} 2024-07-08T09:56:14.084Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "user": "proxyadmin"} 2024-07-08T09:56:14.102Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "user": "proxyadmin"} 2024-07-08T09:56:14.102Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "last-applied-secret": "98e0a7d1566585fb6c912b0bedae78d69a680a2b09c395a83d95642026869811"} 2024-07-08T09:56:14.102Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "last-applied-secret": "98e0a7d1566585fb6c912b0bedae78d69a680a2b09c395a83d95642026869811"} 2024-07-08T09:56:14.364Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "008c16e2-7aac-4aba-b912-ade55c35324e", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T09:56:14.944Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "82dbf940-85ef-4c63-a616-779487974de8", "user": "monitor"} 2024-07-08T09:56:15.416Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "82dbf940-85ef-4c63-a616-779487974de8", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T09:56:31.701Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "580662fe-6fe4-4f4f-a8e6-928c7c76564a", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-2: dial tcp: lookup some-name-pxc-2.some-name-pxc.users-13843 on 10.180.128.10:53: no such host"} 2024-07-08T09:57:14.748Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "51697384-1370-4132-96f9-6dac12ade1dc", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.168.200.42:33062: connect: connection refused"} 2024-07-08T09:58:02.185Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "5dde8de3-efe7-4988-af1e-43e43e622e58", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.168.201.59:33062: connect: connection refused"} 2024-07-08T09:58:07.385Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "e755a02a-ff39-4320-9db0-1cb2aa2cc4e1", "primary name": "some-name-pxc-0.some-name-pxc.users-13843.svc.cluster.local"} 2024-07-08T09:58:12.591Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "baed3343-d5f7-4a23-9491-c2dba7b58e6d", "primary name": "some-name-pxc-0.some-name-pxc.users-13843.svc.cluster.local"} 2024-07-08T09:58:17.830Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "ff12fd8d-94e7-4e36-a816-512308493dbe", "primary name": "some-name-pxc-0.some-name-pxc.users-13843.svc.cluster.local"} 2024-07-08T09:58:23.044Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "5d755124-a573-462d-90c5-705249ee0c94", "primary name": "some-name-pxc-0.some-name-pxc.users-13843.svc.cluster.local"} 2024-07-08T09:58:28.243Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "c656c2f2-4bfe-476b-a1d1-4ac22bfc950c", "primary name": "some-name-pxc-0.some-name-pxc.users-13843.svc.cluster.local"} 2024-07-08T09:58:33.797Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "588c8e48-0056-4bc4-9033-753bc1e8f388", "primary name": "some-name-pxc-0.some-name-pxc.users-13843.svc.cluster.local"} 2024-07-08T09:58:39.001Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "d4846f64-8d65-4890-81a8-9476ec0c0920", "primary name": "some-name-pxc-0.some-name-pxc.users-13843.svc.cluster.local"} 2024-07-08T09:58:44.574Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "9d430413-fc3f-434f-93dd-877772e7045a", "user": "monitor"} 2024-07-08T09:58:44.857Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "9d430413-fc3f-434f-93dd-877772e7045a", "user": "monitor"} 2024-07-08T09:58:44.874Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "9d430413-fc3f-434f-93dd-877772e7045a", "last-applied-secret": "98e0a7d1566585fb6c912b0bedae78d69a680a2b09c395a83d95642026869811"} 2024-07-08T09:58:48.155Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "9d430413-fc3f-434f-93dd-877772e7045a"} 2024-07-08T09:58:52.861Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "501b1651-b207-4414-ab81-ed4605324c30"} 2024-07-08T09:58:54.986Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "a7a5437b-b84b-4fdf-b705-088739606103", "user": "operator"} 2024-07-08T09:58:55.014Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "a7a5437b-b84b-4fdf-b705-088739606103", "user": "operator"} 2024-07-08T09:58:55.024Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "a7a5437b-b84b-4fdf-b705-088739606103", "secret": "some-name-mysql-init", "user": "operator"} 2024-07-08T09:58:55.038Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "a7a5437b-b84b-4fdf-b705-088739606103", "user": "operator"} 2024-07-08T09:58:55.068Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "a7a5437b-b84b-4fdf-b705-088739606103", "user": "operator"} 2024-07-08T09:58:55.104Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "a7a5437b-b84b-4fdf-b705-088739606103", "last-applied-secret": "31486aef920a36e288d21b9a85f42d2793e28bdb7fd63d5d86c7c95b4a4c7b90"} 2024-07-08T09:58:56.371Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "a7a5437b-b84b-4fdf-b705-088739606103", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13843.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13843.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13843.svc.' 
(using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13843.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13843.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-13843.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T09:59:46.224Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "d5ed0543-390b-48fc-95c2-5d0bd2915864"} 2024-07-08T09:59:54.977Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "27bd81b5-cfb8-4abd-82e8-263e5cfb3fff"} 2024-07-08T10:00:00.375Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "35b5c796-cbe1-41d7-8e6f-050241a6b2ea"} 2024-07-08T10:00:05.469Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "9857e4d9-26ab-418c-a5dc-841089fdfc79"} 2024-07-08T10:00:10.778Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "b09fe8d8-fd48-4af6-8855-8032983f1476"} 2024-07-08T10:00:16.080Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "4469f6e6-bb82-475f-841d-15dc863e2668"} 2024-07-08T10:00:21.662Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "5d3bad8f-9d2f-4cd1-96fb-6f6483acfe4a"} 2024-07-08T10:00:27.151Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "fc2f9321-23b6-4c58-b88c-081755864df3"} 2024-07-08T10:00:33.357Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "5375dc5a-5b03-41c0-9cbc-3c59f403f38b"} 2024-07-08T10:00:38.662Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "1140926a-3562-4e89-924f-6dac17fc6212"} 2024-07-08T10:00:43.437Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "d6884466-6d15-4f73-86ab-de44cd8d566e"} 2024-07-08T10:00:48.669Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": 
"f8f3ea51-000a-44db-99c6-bd3282d789da"} 2024-07-08T10:00:54.052Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "ea7491de-dd05-409f-9fef-82cabd2771bf"} 2024-07-08T10:00:59.361Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "e2759e51-2a4f-44e5-87db-bc32e063f40c"} 2024-07-08T10:01:05.146Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "3b751e3f-b56c-4b98-97ad-2c8a45f5a8ac"} 2024-07-08T10:01:10.065Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "8d8f30da-d264-4a9c-8d93-482943b5d56f"} 2024-07-08T10:01:15.553Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "30df4022-fc30-41ca-a45c-c7dae0b24565"} 2024-07-08T10:01:20.942Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "e4c2ab6d-dd3b-449e-9191-66dba1741d16"} 2024-07-08T10:01:22.890Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "25608689-3f61-4e1e-b506-32786dc8f862", "user": "root"} 2024-07-08T10:01:22.938Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "25608689-3f61-4e1e-b506-32786dc8f862", "user": "root"} 2024-07-08T10:01:22.948Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "25608689-3f61-4e1e-b506-32786dc8f862", "secret": "some-name-mysql-init", "user": "root"} 2024-07-08T10:01:28.111Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "25608689-3f61-4e1e-b506-32786dc8f862"} 2024-07-08T10:01:28.121Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "25608689-3f61-4e1e-b506-32786dc8f862", "user": "root"} 2024-07-08T10:01:28.165Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "25608689-3f61-4e1e-b506-32786dc8f862", "user": "root"} 2024-07-08T10:01:28.187Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "25608689-3f61-4e1e-b506-32786dc8f862", "user": "monitor"} 2024-07-08T10:01:28.218Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "25608689-3f61-4e1e-b506-32786dc8f862", "user": "monitor"} 2024-07-08T10:01:28.229Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "25608689-3f61-4e1e-b506-32786dc8f862", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-08T10:01:28.269Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "25608689-3f61-4e1e-b506-32786dc8f862", "user": "monitor"} 2024-07-08T10:01:28.280Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": 
"25608689-3f61-4e1e-b506-32786dc8f862", "user": "monitor"} 2024-07-08T10:01:28.365Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "25608689-3f61-4e1e-b506-32786dc8f862", "user": "xtrabackup"} 2024-07-08T10:01:28.393Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "25608689-3f61-4e1e-b506-32786dc8f862", "user": "xtrabackup"} 2024-07-08T10:01:28.402Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "25608689-3f61-4e1e-b506-32786dc8f862", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-07-08T10:01:28.411Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "25608689-3f61-4e1e-b506-32786dc8f862", "user": "xtrabackup"} 2024-07-08T10:01:28.438Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "25608689-3f61-4e1e-b506-32786dc8f862", "user": "xtrabackup"} 2024-07-08T10:01:28.451Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "25608689-3f61-4e1e-b506-32786dc8f862", "user": "proxyadmin"} 2024-07-08T10:01:28.494Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "25608689-3f61-4e1e-b506-32786dc8f862", "user": "proxyadmin"} 2024-07-08T10:01:28.503Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "25608689-3f61-4e1e-b506-32786dc8f862", "user": "proxyadmin"} 2024-07-08T10:01:28.503Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "25608689-3f61-4e1e-b506-32786dc8f862", "last-applied-secret": "44e1e68c3e53542c41669e8cf545457c51a2f7ee30583d2863102e8fc21284bd"} 2024-07-08T10:01:28.503Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "25608689-3f61-4e1e-b506-32786dc8f862", "last-applied-secret": "44e1e68c3e53542c41669e8cf545457c51a2f7ee30583d2863102e8fc21284bd"} 2024-07-08T10:01:28.787Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "25608689-3f61-4e1e-b506-32786dc8f862", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T10:02:21.702Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f5fdf907-f7ac-4ad4-87cc-b113d1926a9a", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-13843 on 10.180.128.10:53: no such host"} 2024-07-08T10:02:26.739Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "81a8695c-efd8-42ba-adc7-2e4dcc68449a", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-13843 on 10.180.128.10:53: no such host"} 2024-07-08T10:02:32.045Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "102c3c21-115e-4737-865b-569c47a81773", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-13843 on 10.180.128.10:53: no such host"} 2024-07-08T10:03:20.142Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "0574e2c6-709f-43ca-bec1-4e8f161ac302", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.168.201.63:33062: connect: connection refused"} 2024-07-08T10:03:25.344Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "0d6a7a51-63be-43aa-b169-5e3ae0f99444", "primary name": "some-name-pxc-0.some-name-pxc.users-13843.svc.cluster.local"} 2024-07-08T10:03:30.521Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "68795d02-7dad-4df2-9529-ebeb10febb8f", "primary name": "some-name-pxc-0.some-name-pxc.users-13843.svc.cluster.local"} 2024-07-08T10:03:35.707Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "a97fcbe4-a8ea-4e28-a01a-c9d4901b7098", "primary name": "some-name-pxc-0.some-name-pxc.users-13843.svc.cluster.local"} 2024-07-08T10:03:40.949Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "384f682c-e971-4bc8-a8bd-8ac09bf456b2", "primary name": "some-name-pxc-0.some-name-pxc.users-13843.svc.cluster.local"} 2024-07-08T10:03:46.125Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f2656fdf-69c4-4f54-9fa5-2b11eececf9f", "primary name": "some-name-pxc-0.some-name-pxc.users-13843.svc.cluster.local"} 2024-07-08T10:03:51.304Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "58e40214-c657-4bd7-ba1e-00fb9ea74c13", "primary name": "some-name-pxc-0.some-name-pxc.users-13843.svc.cluster.local"} 2024-07-08T10:03:55.685Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: c0a5ff66-ea80-4203-8ca4-bc768e7cd2ba 2024-07-08T10:03:59.052Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "c17bbbc5-1213-4246-b194-4919082ab4ba", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 10.180.135.218:3306: connect: connection refused"} 2024-07-08T10:05:45.218Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "2f6db560-67bb-4170-bd6d-3491c30302f7", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-13843 on 10.180.128.10:53: no such host"} 2024-07-08T10:06:22.980Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f37526ad-8308-4496-8c88-d3bc008a9f4d", "user": "root"} 2024-07-08T10:06:23.027Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f37526ad-8308-4496-8c88-d3bc008a9f4d", "user": "root"} 2024-07-08T10:06:23.035Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f37526ad-8308-4496-8c88-d3bc008a9f4d", "secret": "some-name-mysql-init", "user": "root"} 2024-07-08T10:06:23.048Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f37526ad-8308-4496-8c88-d3bc008a9f4d", "user": "root"} 2024-07-08T10:06:23.089Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f37526ad-8308-4496-8c88-d3bc008a9f4d", "user": "root"} 2024-07-08T10:06:23.098Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f37526ad-8308-4496-8c88-d3bc008a9f4d", "user": "operator"} 2024-07-08T10:06:23.141Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f37526ad-8308-4496-8c88-d3bc008a9f4d", "user": "operator"} 2024-07-08T10:06:23.151Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f37526ad-8308-4496-8c88-d3bc008a9f4d", "secret": "some-name-mysql-init", "user": "operator"} 2024-07-08T10:06:23.163Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f37526ad-8308-4496-8c88-d3bc008a9f4d", "user": "operator"} 2024-07-08T10:06:23.190Z INFO Old password discarded 
{"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f37526ad-8308-4496-8c88-d3bc008a9f4d", "user": "operator"} 2024-07-08T10:06:23.201Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f37526ad-8308-4496-8c88-d3bc008a9f4d", "user": "monitor"} 2024-07-08T10:06:23.229Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f37526ad-8308-4496-8c88-d3bc008a9f4d", "user": "monitor"} 2024-07-08T10:06:23.237Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f37526ad-8308-4496-8c88-d3bc008a9f4d", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-08T10:06:23.250Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f37526ad-8308-4496-8c88-d3bc008a9f4d", "user": "monitor"} 2024-07-08T10:06:23.336Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f37526ad-8308-4496-8c88-d3bc008a9f4d", "user": "xtrabackup"} 2024-07-08T10:06:23.361Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f37526ad-8308-4496-8c88-d3bc008a9f4d", "user": "xtrabackup"} 2024-07-08T10:06:23.369Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f37526ad-8308-4496-8c88-d3bc008a9f4d", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-07-08T10:06:23.379Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f37526ad-8308-4496-8c88-d3bc008a9f4d", "user": "xtrabackup"} 2024-07-08T10:06:23.408Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f37526ad-8308-4496-8c88-d3bc008a9f4d", "user": "xtrabackup"} 2024-07-08T10:06:23.417Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f37526ad-8308-4496-8c88-d3bc008a9f4d", "user": "replication"} 2024-07-08T10:06:23.455Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f37526ad-8308-4496-8c88-d3bc008a9f4d", "user": "replication"} 2024-07-08T10:06:23.464Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f37526ad-8308-4496-8c88-d3bc008a9f4d", "secret": "some-name-mysql-init", "user": "replication"} 2024-07-08T10:06:23.474Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f37526ad-8308-4496-8c88-d3bc008a9f4d", "user": "replication"} 2024-07-08T10:06:23.502Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f37526ad-8308-4496-8c88-d3bc008a9f4d", "user": "replication"} 2024-07-08T10:06:23.502Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f37526ad-8308-4496-8c88-d3bc008a9f4d", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 
2024-07-08T10:06:23.502Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "f37526ad-8308-4496-8c88-d3bc008a9f4d", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-07-08T10:07:10.848Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "6caec526-8caa-412f-ab36-c3aeee994e0b", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-13843 on 10.180.128.10:53: no such host"} 2024-07-08T10:07:15.964Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "01e6fe43-6a4a-43af-90ce-0a2f7835b9f8", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-13843 on 10.180.128.10:53: no such host"} 2024-07-08T10:07:53.081Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "2889969a-ab89-4326-be40-24326e1c9077", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-13843 on 10.180.128.10:53: no such host"} 2024-07-08T10:07:58.037Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "44f4e499-2f8f-4bfe-b8c2-106d4be14e45", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-13843 on 10.180.128.10:53: no such host"} 2024-07-08T10:08:03.740Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "272a3726-eb0b-43b9-be46-d7487906de25", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-13843 on 10.180.128.10:53: no such host"} 2024-07-08T10:08:40.694Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "e0cb08f0-467a-4f83-97bd-97c6e87755d5", "user": "monitor"} 2024-07-08T10:08:40.986Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "e0cb08f0-467a-4f83-97bd-97c6e87755d5", "user": "monitor"} 2024-07-08T10:08:41.003Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "e0cb08f0-467a-4f83-97bd-97c6e87755d5", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-07-08T10:08:56.466Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "25f60bf0-122f-4718-bd1f-e60b9c7d447a", "user": "monitor"} 2024-07-08T10:08:56.493Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "25f60bf0-122f-4718-bd1f-e60b9c7d447a", "user": "monitor"} 2024-07-08T10:08:56.502Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "25f60bf0-122f-4718-bd1f-e60b9c7d447a", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-08T10:08:56.513Z INFO Internal secrets updated {"controller": 
"pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "25f60bf0-122f-4718-bd1f-e60b9c7d447a", "user": "monitor"} 2024-07-08T10:08:56.695Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "25f60bf0-122f-4718-bd1f-e60b9c7d447a", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"} 2024-07-08T10:10:02.531Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "26acd228-13bf-43e4-9370-de50bf6dbdce", "user": "monitor"} 2024-07-08T10:10:03.798Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "26acd228-13bf-43e4-9370-de50bf6dbdce", "user": "monitor"} 2024-07-08T10:10:03.815Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-13843", "name": "some-name", "reconcileID": "26acd228-13bf-43e4-9370-de50bf6dbdce", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:222 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:261 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:324 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1248 [mysql] 2024/07/08 09:57:14 connection.go:49: read tcp 10.168.201.54:38380->10.168.200.42:33062: read: connection reset by peer [mysql] 2024/07/08 10:06:01 connection.go:49: unexpected EOF sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2 + grep -v NAMESPACE + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-13843 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.9fcMHe5JaF ++ mktemp + local LAST_ERR=/tmp/tmp.uch18E10tf + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.9fcMHe5JaF perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.uch18E10tf + rm /tmp/tmp.9fcMHe5JaF /tmp/tmp.uch18E10tf + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.USCgqvHOdd ++ mktemp + local LAST_ERR=/tmp/tmp.QOzVyF5scy + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.USCgqvHOdd No resources found + cat /tmp/tmp.QOzVyF5scy + rm /tmp/tmp.USCgqvHOdd /tmp/tmp.QOzVyF5scy + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.qYCd09L7wy ++ mktemp + local LAST_ERR=/tmp/tmp.bGacSEN9FE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set 
+e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qYCd09L7wy No resources found + cat /tmp/tmp.bGacSEN9FE + rm /tmp/tmp.qYCd09L7wy /tmp/tmp.bGacSEN9FE + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.4lMLJNy4ax ++ mktemp + local LAST_ERR=/tmp/tmp.tOPeaXuHdL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.4lMLJNy4ax validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.tOPeaXuHdL + rm /tmp/tmp.4lMLJNy4ax /tmp/tmp.tOPeaXuHdL + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-13843 + rm -rf /tmp/tmp.3wek4g2sOh + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed+ local LAST_OUT=/tmp/tmp.gZ3GQ5wYYW ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.DecmzZKdxb ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.dtGQDpXmgY + local exit_status=0 + local LAST_ERR=/tmp/tmp.wvNV4Xp5qe + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace users-13843 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator
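(The kubectl_bin calls traced throughout the cleanup above follow a capture-and-retry pattern: mktemp files for stdout and stderr, up to three attempts via seq 0 2, then cat and rm of the captured output. A rough reconstruction of that wrapper, inferred from the trace for illustration only; the real helper in the test framework may differ.)

    # Reconstructed from the trace; not copied from the test framework.
    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" 1>"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" != 0 ]; then
                sleep 1        # assumed back-off between attempts
            else
                break          # success: stop retrying
            fi
        done
        cat "$LAST_OUT"        # replay captured stdout
        cat "$LAST_ERR" >&2    # replay captured stderr
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }

(This matches the LAST_OUT/LAST_ERR handling and the "'[' 0 '!=' 0 ']'" / break steps that appear before each cat/rm/return 0 in the trace.)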