Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/logs/users-5-7.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + create_infra users-18704 + local ns=users-18704 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-14736 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.FIQ5CeYm7m ++ mktemp + local LAST_ERR=/tmp/tmp.4j0tnwNZbH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.FIQ5CeYm7m perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.4j0tnwNZbH + rm /tmp/tmp.FIQ5CeYm7m /tmp/tmp.4j0tnwNZbH + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.wKdUY9puOQ ++ mktemp + local LAST_ERR=/tmp/tmp.0HB0zzJtjP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.wKdUY9puOQ No resources found + cat /tmp/tmp.0HB0zzJtjP + rm /tmp/tmp.wKdUY9puOQ /tmp/tmp.0HB0zzJtjP + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.DIWhab51Ki ++ mktemp + local LAST_ERR=/tmp/tmp.c6WBhOEacQ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.DIWhab51Ki No resources found + cat /tmp/tmp.c6WBhOEacQ + rm /tmp/tmp.DIWhab51Ki /tmp/tmp.c6WBhOEacQ + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified 
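The mktemp/LAST_OUT/LAST_ERR churn that repeats through this whole trace comes from the suite's kubectl_bin retry wrapper, which runs each kubectl call up to three times and captures stdout/stderr in temp files that are replayed into the log. A minimal sketch of that pattern, reconstructed from the trace rather than taken from the suite's sources (the function body is an assumption):

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0 i
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do                      # up to three attempts
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" != 0 ]; then
                sleep 0                              # the trace shows "sleep 0": retry immediately
                continue
            fi
            break
        done
        cat "$LAST_OUT" "$LAST_ERR"                  # replay captured output into the log
        rm -f "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }

The repeated "error: resource(s) were provided, but no name was specified" lines above are expected: destroy_chaos_mesh expands an empty grep result into each "kubectl delete" call when no chaos-mesh objects exist, and every failure is swallowed by the ":" no-op that follows it.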
+ : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator + kubectl_bin get ns ++ mktemp + local LAST_OUT=/tmp/tmp.NJ8xzetLPr ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.Krr2EuVWot + local exit_status=0 ++ seq 0 2 + local LAST_OUT=/tmp/tmp.PZlH9iNtFg + for i in '$(seq 0 2)' + set +e + kubectl get ns ++ mktemp + local LAST_ERR=/tmp/tmp.eUvOObfebp + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + xargs kubectl delete ns + awk '{print$1}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.NJ8xzetLPr + cat /tmp/tmp.Krr2EuVWot + rm /tmp/tmp.NJ8xzetLPr /tmp/tmp.Krr2EuVWot + return 0 namespace "users-14736" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PZlH9iNtFg namespace "pxc-operator" deleted + cat /tmp/tmp.eUvOObfebp + rm /tmp/tmp.PZlH9iNtFg /tmp/tmp.eUvOObfebp + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.355I8WKc5I ++ mktemp + local LAST_ERR=/tmp/tmp.zeIyN6xIIt + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.355I8WKc5I namespace/pxc-operator created + cat /tmp/tmp.zeIyN6xIIt + rm /tmp/tmp.355I8WKc5I /tmp/tmp.zeIyN6xIIt + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.Y9LXLEzbZp +++ mktemp ++ local LAST_ERR=/tmp/tmp.6hRUxetZmt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Y9LXLEzbZp ++ cat /tmp/tmp.6hRUxetZmt ++ rm /tmp/tmp.Y9LXLEzbZp /tmp/tmp.6hRUxetZmt ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1740-0a840b68-3-cluster7 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.DqH5iwLCB2 ++ mktemp + local LAST_ERR=/tmp/tmp.gesz06DypD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1740-0a840b68-3-cluster7 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.DqH5iwLCB2 Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1740-0a840b68-3-cluster7" modified. 
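Two deletions run interleaved above: leftover test namespaces are swept through a filter pipeline while the pxc-operator namespace is deleted directly. Untangled into sequential form, a sketch condensed from the trace:

    kubectl get ns \
        | egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' \
        | awk '{print $1}' \
        | xargs kubectl delete ns        # swept here: users-14736
    kubectl delete namespace pxc-operator

Note that the ^default$ anchor matches only a line consisting solely of "default", while "kubectl get ns" rows carry STATUS and AGE columns, so the default row slips through the filter; that is presumably why the expected, tolerated 'namespaces "default" is forbidden' error appears above.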
+ cat /tmp/tmp.gesz06DypD + rm /tmp/tmp.DqH5iwLCB2 /tmp/tmp.gesz06DypD + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.QDj6Y3KnEg ++ mktemp + local LAST_ERR=/tmp/tmp.iF2YJPtPue + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QDj6Y3KnEg customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.iF2YJPtPue + rm /tmp/tmp.QDj6Y3KnEg /tmp/tmp.iF2YJPtPue + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.cGBINxP87O ++ mktemp + local LAST_ERR=/tmp/tmp.P9PrYx2alE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.cGBINxP87O clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.P9PrYx2alE + rm /tmp/tmp.cGBINxP87O /tmp/tmp.P9PrYx2alE + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/deploy/cw-operator.yaml + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1740-0a840b68^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.amgBg59X4O ++ mktemp + local LAST_ERR=/tmp/tmp.xWAYtMZenI + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.amgBg59X4O deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.xWAYtMZenI + rm /tmp/tmp.amgBg59X4O /tmp/tmp.xWAYtMZenI + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.XYiX3EEBUF ++ mktemp + local LAST_ERR=/tmp/tmp.faX5VEqTYt + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XYiX3EEBUF pod/percona-xtradb-cluster-operator-6849457d9-kkwd7 condition met + cat /tmp/tmp.faX5VEqTYt + rm /tmp/tmp.XYiX3EEBUF /tmp/tmp.faX5VEqTYt + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.qkxLcY5iWa +++ mktemp ++ local LAST_ERR=/tmp/tmp.c0ugSZcfBM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qkxLcY5iWa ++ cat /tmp/tmp.c0ugSZcfBM ++ rm /tmp/tmp.qkxLcY5iWa /tmp/tmp.c0ugSZcfBM ++ return 0 + wait_pod percona-xtradb-cluster-operator-6849457d9-kkwd7 480 pxc-operator + local pod=percona-xtradb-cluster-operator-6849457d9-kkwd7 + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-6849457d9-kkwd7 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-6849457d9-kkwd7 condition met percona-xtradb-cluster-operator-6849457d9-kkwd7.Ok + sleep 3 + create_namespace users-18704 + local namespace=users-18704 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + '[' -n '' ']' + desc 'cleaned up old namespaces users-18704' + set +o 
xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-18704 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-18704 + xargs kubectl delete ns + awk '{print$1}' + kubectl_bin get ns ++ mktemp + local LAST_OUT=/tmp/tmp.RSMiiZJLK6 ++ mktemp + local LAST_ERR=/tmp/tmp.6dCHMmQsvD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns ++ mktemp + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + local LAST_OUT=/tmp/tmp.PsDCw7J0Dh ++ mktemp + local LAST_ERR=/tmp/tmp.pKGVhu1AFL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-18704 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-18704 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RSMiiZJLK6 + cat /tmp/tmp.6dCHMmQsvD + rm /tmp/tmp.RSMiiZJLK6 /tmp/tmp.6dCHMmQsvD + return 0 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-18704 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.PsDCw7J0Dh + cat /tmp/tmp.pKGVhu1AFL Error from server (NotFound): namespaces "users-18704" not found + rm /tmp/tmp.PsDCw7J0Dh /tmp/tmp.pKGVhu1AFL + return 1 + : + wait_for_delete namespace/users-18704 + local res=namespace/users-18704 + echo -n 'namespace/users-18704 - ' namespace/users-18704 - + set +o xtrace Error from server (NotFound): namespaces "users-18704" not found + desc 'create namespace users-18704' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-18704 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-18704 ++ mktemp + local LAST_OUT=/tmp/tmp.QRPLe6MTB9 ++ mktemp + local LAST_ERR=/tmp/tmp.eDsT2dH0dy + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-18704 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QRPLe6MTB9 namespace/users-18704 created + cat /tmp/tmp.eDsT2dH0dy + rm /tmp/tmp.QRPLe6MTB9 /tmp/tmp.eDsT2dH0dy + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.VX6erhPEej +++ mktemp ++ local LAST_ERR=/tmp/tmp.qoRt7pyWih ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VX6erhPEej ++ cat /tmp/tmp.qoRt7pyWih ++ rm /tmp/tmp.VX6erhPEej /tmp/tmp.qoRt7pyWih ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1740-0a840b68-3-cluster7 --namespace=users-18704 ++ mktemp + local LAST_OUT=/tmp/tmp.WYmJNUehlF ++ mktemp + local LAST_ERR=/tmp/tmp.EalITrXVOX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1740-0a840b68-3-cluster7 --namespace=users-18704 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.WYmJNUehlF Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1740-0a840b68-3-cluster7" modified. 
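wait_for_delete (used for both namespaces above) blocks until kubectl reports the resource gone. Only its echoed prefix and the final NotFound error are visible in the trace, so this reconstruction of the polling loop is an assumption:

    wait_for_delete() {
        local res="$1"
        echo -n "$res - "
        # poll until `kubectl get` fails, i.e. the resource no longer exists
        while kubectl get "$res" >/dev/null 2>&1; do
            echo -n .
            sleep 1
        done
    }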
+ cat /tmp/tmp.EalITrXVOX + rm /tmp/tmp.WYmJNUehlF /tmp/tmp.EalITrXVOX + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.Y3B28cLlVX ++ mktemp + local LAST_ERR=/tmp/tmp.GkbDAirffb + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Y3B28cLlVX secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.GkbDAirffb + rm /tmp/tmp.Y3B28cLlVX /tmp/tmp.GkbDAirffb + return 0 + desc 'create PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.eFhqUlFuc3 ++ mktemp + local LAST_ERR=/tmp/tmp.7IIVNDZmDq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.eFhqUlFuc3 secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.7IIVNDZmDq + rm /tmp/tmp.eFhqUlFuc3 /tmp/tmp.7IIVNDZmDq + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/client.yml + kubectl_bin apply -f - + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: 
perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1740-0a840b68#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-18704~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ mktemp + local LAST_OUT=/tmp/tmp.21bCE8mSrX ++ mktemp + local LAST_ERR=/tmp/tmp.1PJwlW1EEq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.21bCE8mSrX deployment.apps/pxc-client created + cat /tmp/tmp.1PJwlW1EEq + rm /tmp/tmp.21bCE8mSrX /tmp/tmp.1PJwlW1EEq + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1740-0a840b68#' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-18704~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' ++ mktemp + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_OUT=/tmp/tmp.aWcqhfrakn ++ mktemp + local LAST_ERR=/tmp/tmp.cGdH6y093c + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.aWcqhfrakn perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.cGdH6y093c + rm /tmp/tmp.aWcqhfrakn /tmp/tmp.cGdH6y093c + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.fhId4T4Tww ++++ mktemp +++ local LAST_ERR=/tmp/tmp.zzsLZqYTJJ +++ local exit_status=0 ++++ seq 0 2 +++ for i 
in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.fhId4T4Tww +++ cat /tmp/tmp.zzsLZqYTJJ +++ rm /tmp/tmp.fhId4T4Tww /tmp/tmp.zzsLZqYTJJ +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.kLYS5lTDPr ++++ mktemp +++ local LAST_ERR=/tmp/tmp.jEm4QJaL05 +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.kLYS5lTDPr +++ cat /tmp/tmp.jEm4QJaL05 +++ rm /tmp/tmp.kLYS5lTDPr /tmp/tmp.jEm4QJaL05 +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-18704 ++ mktemp + local LAST_OUT=/tmp/tmp.Qre4XPqiyt ++ mktemp + local LAST_ERR=/tmp/tmp.eiawjBf6GW + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-18704 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-18704 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-18704 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.Qre4XPqiyt + cat /tmp/tmp.eiawjBf6GW error: no matching resources found + rm /tmp/tmp.Qre4XPqiyt /tmp/tmp.eiawjBf6GW + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met 
some-name-pxc-0.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BdCkIKlCbh +++ mktemp ++ local LAST_ERR=/tmp/tmp.EJZmiX6kEQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BdCkIKlCbh ++ cat /tmp/tmp.EJZmiX6kEQ Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.BdCkIKlCbh /tmp/tmp.EJZmiX6kEQ ++ return 0 + [[ -n '' ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4NkBiJoGuH +++ mktemp ++ local LAST_ERR=/tmp/tmp.8I2ZZ14vQi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4NkBiJoGuH ++ cat /tmp/tmp.8I2ZZ14vQi ++ rm /tmp/tmp.4NkBiJoGuH /tmp/tmp.8I2ZZ14vQi ++ return 0 + client_pod=pxc-client-64b479df95-qtwzv + wait_pod pxc-client-64b479df95-qtwzv + local pod=pxc-client-64b479df95-qtwzv + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-qtwzv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-qtwzv condition met pxc-client-64b479df95-qtwzv.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OhBympLzIx +++ mktemp ++ local LAST_ERR=/tmp/tmp.96MijXKakC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ 
set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OhBympLzIx ++ cat /tmp/tmp.96MijXKakC ++ rm /tmp/tmp.OhBympLzIx /tmp/tmp.96MijXKakC ++ return 0 + client_pod=pxc-client-64b479df95-qtwzv + wait_pod pxc-client-64b479df95-qtwzv + local pod=pxc-client-64b479df95-qtwzv + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-qtwzv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-qtwzv condition met pxc-client-64b479df95-qtwzv.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.i7Dr8VUorE +++ mktemp ++ local LAST_ERR=/tmp/tmp.caeWpH2WFl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.i7Dr8VUorE ++ cat /tmp/tmp.caeWpH2WFl ++ rm /tmp/tmp.i7Dr8VUorE /tmp/tmp.caeWpH2WFl ++ return 0 + client_pod=pxc-client-64b479df95-qtwzv + wait_pod pxc-client-64b479df95-qtwzv + local pod=pxc-client-64b479df95-qtwzv + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-qtwzv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-qtwzv condition met pxc-client-64b479df95-qtwzv.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.iIEmphhn2S/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-1.sql /tmp/tmp.iIEmphhn2S/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KAS7fOiaJK +++ mktemp ++ local LAST_ERR=/tmp/tmp.f5YPicFWU9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KAS7fOiaJK ++ cat /tmp/tmp.f5YPicFWU9 ++ rm /tmp/tmp.KAS7fOiaJK /tmp/tmp.f5YPicFWU9 ++ return 0 + client_pod=pxc-client-64b479df95-qtwzv + wait_pod pxc-client-64b479df95-qtwzv + local pod=pxc-client-64b479df95-qtwzv + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-qtwzv ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-64b479df95-qtwzv condition met pxc-client-64b479df95-qtwzv.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.iIEmphhn2S/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-1.sql /tmp/tmp.iIEmphhn2S/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YVx9986VQA +++ mktemp ++ local LAST_ERR=/tmp/tmp.vZP11yxDFn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YVx9986VQA ++ cat /tmp/tmp.vZP11yxDFn ++ rm /tmp/tmp.YVx9986VQA /tmp/tmp.vZP11yxDFn ++ return 0 + client_pod=pxc-client-64b479df95-qtwzv + wait_pod pxc-client-64b479df95-qtwzv + local pod=pxc-client-64b479df95-qtwzv + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-qtwzv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-qtwzv condition met pxc-client-64b479df95-qtwzv.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.iIEmphhn2S/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-1.sql /tmp/tmp.iIEmphhn2S/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WvFMOI5nQW +++ mktemp ++ local LAST_ERR=/tmp/tmp.HAY9M0Eho7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WvFMOI5nQW ++ cat /tmp/tmp.HAY9M0Eho7 Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.WvFMOI5nQW /tmp/tmp.HAY9M0Eho7 ++ return 0 + '[' '' ']' + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.AxyrWUHRld ++ mktemp + local LAST_ERR=/tmp/tmp.IHDLxiZd5E + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.AxyrWUHRld secret/my-cluster-secrets patched + cat /tmp/tmp.IHDLxiZd5E + rm /tmp/tmp.AxyrWUHRld /tmp/tmp.IHDLxiZd5E + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BemCaoqKc9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.qcyHQ9A68H ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BemCaoqKc9 ++ cat /tmp/tmp.qcyHQ9A68H ++ rm /tmp/tmp.BemCaoqKc9 /tmp/tmp.qcyHQ9A68H ++ return 0 + client_pod=pxc-client-64b479df95-qtwzv + wait_pod pxc-client-64b479df95-qtwzv + local pod=pxc-client-64b479df95-qtwzv + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-qtwzv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-qtwzv condition met pxc-client-64b479df95-qtwzv.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.iIEmphhn2S/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-4.sql /tmp/tmp.iIEmphhn2S/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.keaKkPnhfa ++ mktemp + local LAST_ERR=/tmp/tmp.rU60wiXBbM + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.keaKkPnhfa perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.rU60wiXBbM + rm /tmp/tmp.keaKkPnhfa /tmp/tmp.rU60wiXBbM + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sBBO7sMct1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.JyiF8UB6iJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sBBO7sMct1 ++ cat /tmp/tmp.JyiF8UB6iJ ++ rm /tmp/tmp.sBBO7sMct1 /tmp/tmp.JyiF8UB6iJ ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KzqWE5znSw +++ mktemp ++ local LAST_ERR=/tmp/tmp.aBPpashPBY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KzqWE5znSw ++ cat /tmp/tmp.aBPpashPBY ++ rm /tmp/tmp.KzqWE5znSw /tmp/tmp.aBPpashPBY ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.kwbRoYG4GM ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Yxzom2fuUV +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.kwbRoYG4GM +++++ cat /tmp/tmp.Yxzom2fuUV +++++ rm /tmp/tmp.kwbRoYG4GM /tmp/tmp.Yxzom2fuUV +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.UonIOCNRI6 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.1hqfIpYNmZ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.UonIOCNRI6 +++++ cat /tmp/tmp.1hqfIpYNmZ +++++ rm /tmp/tmp.UonIOCNRI6 /tmp/tmp.1hqfIpYNmZ +++++ return 0 ++++ [[ true == 
\t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JrO4AtsxPf +++ mktemp ++ local LAST_ERR=/tmp/tmp.lzfVyT7oZQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JrO4AtsxPf ++ cat /tmp/tmp.lzfVyT7oZQ ++ rm /tmp/tmp.JrO4AtsxPf /tmp/tmp.lzfVyT7oZQ ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.8D4ugkMySB ++ mktemp + local LAST_ERR=/tmp/tmp.ZnHb5dkCvG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8D4ugkMySB secret/my-cluster-secrets patched + cat /tmp/tmp.ZnHb5dkCvG + rm /tmp/tmp.8D4ugkMySB /tmp/tmp.ZnHb5dkCvG + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xDG8EGNZ3m +++ mktemp ++ local LAST_ERR=/tmp/tmp.mu6OzjrUo4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xDG8EGNZ3m ++ cat /tmp/tmp.mu6OzjrUo4 ++ rm /tmp/tmp.xDG8EGNZ3m /tmp/tmp.mu6OzjrUo4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MwAVO7vzmv +++ mktemp ++ local LAST_ERR=/tmp/tmp.w2NwL88Tkp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MwAVO7vzmv ++ cat /tmp/tmp.w2NwL88Tkp ++ rm /tmp/tmp.MwAVO7vzmv /tmp/tmp.w2NwL88Tkp ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.z7TOi7H2sY +++ mktemp ++ local LAST_ERR=/tmp/tmp.kvXsujmW1V ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.z7TOi7H2sY ++ cat /tmp/tmp.kvXsujmW1V ++ rm /tmp/tmp.z7TOi7H2sY /tmp/tmp.kvXsujmW1V ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local 
LAST_OUT=/tmp/tmp.n8emqloVW1 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.AEG0ep8xcC +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.n8emqloVW1 +++++ cat /tmp/tmp.AEG0ep8xcC +++++ rm /tmp/tmp.n8emqloVW1 /tmp/tmp.AEG0ep8xcC +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Hm33egSvnB ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.hsQBoOO3Zx +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Hm33egSvnB +++++ cat /tmp/tmp.hsQBoOO3Zx +++++ rm /tmp/tmp.Hm33egSvnB /tmp/tmp.hsQBoOO3Zx +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MTYxoJ2t7X +++ mktemp ++ local LAST_ERR=/tmp/tmp.uuvZWivP1F ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MTYxoJ2t7X ++ cat /tmp/tmp.uuvZWivP1F ++ rm /tmp/tmp.MTYxoJ2t7X /tmp/tmp.uuvZWivP1F ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.iIEmphhn2S/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-2.sql /tmp/tmp.iIEmphhn2S/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.iIEmphhn2S/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-2.sql /tmp/tmp.iIEmphhn2S/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.iIEmphhn2S/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-2.sql /tmp/tmp.iIEmphhn2S/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.QMJLmxIu4L ++ mktemp + local LAST_ERR=/tmp/tmp.Oc3UQq0yJQ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QMJLmxIu4L perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.Oc3UQq0yJQ + rm /tmp/tmp.QMJLmxIu4L /tmp/tmp.Oc3UQq0yJQ + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.LVMwf2zzWt ++ mktemp + local LAST_ERR=/tmp/tmp.OFZR1M4Zut + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LVMwf2zzWt secret/my-cluster-secrets patched + cat /tmp/tmp.OFZR1M4Zut + rm /tmp/tmp.LVMwf2zzWt /tmp/tmp.OFZR1M4Zut + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HLyuIKJlyJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.Zhv9EO1MBu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HLyuIKJlyJ ++ cat /tmp/tmp.Zhv9EO1MBu ++ rm /tmp/tmp.HLyuIKJlyJ /tmp/tmp.Zhv9EO1MBu ++ return 0 + [[ 
initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9t8psBhuza +++ mktemp ++ local LAST_ERR=/tmp/tmp.lEHgkj9fdA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9t8psBhuza ++ cat /tmp/tmp.lEHgkj9fdA ++ rm /tmp/tmp.9t8psBhuza /tmp/tmp.lEHgkj9fdA ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vKkLUenjtX +++ mktemp ++ local LAST_ERR=/tmp/tmp.FCeM6S9vhU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vKkLUenjtX ++ cat /tmp/tmp.FCeM6S9vhU ++ rm /tmp/tmp.vKkLUenjtX /tmp/tmp.FCeM6S9vhU ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zK7yBDKuBl +++ mktemp ++ local LAST_ERR=/tmp/tmp.b1sNCn9K9u ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zK7yBDKuBl ++ cat /tmp/tmp.b1sNCn9K9u ++ rm /tmp/tmp.zK7yBDKuBl /tmp/tmp.b1sNCn9K9u ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.o8YTF8ac29 +++ mktemp ++ local LAST_ERR=/tmp/tmp.RnfTU6Y7s2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.o8YTF8ac29 ++ cat /tmp/tmp.RnfTU6Y7s2 ++ rm /tmp/tmp.o8YTF8ac29 /tmp/tmp.RnfTU6Y7s2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uVIGVUclns +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZZaFa027mq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uVIGVUclns ++ cat /tmp/tmp.ZZaFa027mq ++ rm /tmp/tmp.uVIGVUclns /tmp/tmp.ZZaFa027mq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TTYl5cydXM +++ mktemp ++ local LAST_ERR=/tmp/tmp.E6IM09W7xO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TTYl5cydXM ++ cat /tmp/tmp.E6IM09W7xO ++ rm 
/tmp/tmp.TTYl5cydXM /tmp/tmp.E6IM09W7xO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rswdiWzQ51 +++ mktemp ++ local LAST_ERR=/tmp/tmp.T9xS6Sw4vt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rswdiWzQ51 ++ cat /tmp/tmp.T9xS6Sw4vt ++ rm /tmp/tmp.rswdiWzQ51 /tmp/tmp.T9xS6Sw4vt ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6DeReul9Q7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.FaNz85Sm6N ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6DeReul9Q7 ++ cat /tmp/tmp.FaNz85Sm6N ++ rm /tmp/tmp.6DeReul9Q7 /tmp/tmp.FaNz85Sm6N ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.IKKXmEwcVu ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.hs9Y4oTiQJ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.IKKXmEwcVu +++++ cat /tmp/tmp.hs9Y4oTiQJ +++++ rm /tmp/tmp.IKKXmEwcVu /tmp/tmp.hs9Y4oTiQJ +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ItD2XDGcLb ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.PugKkPwRV1 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ItD2XDGcLb +++++ cat /tmp/tmp.PugKkPwRV1 +++++ rm /tmp/tmp.ItD2XDGcLb /tmp/tmp.PugKkPwRV1 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.R2sv0O9Aie +++ mktemp ++ local LAST_ERR=/tmp/tmp.m44Jq2utAI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.R2sv0O9Aie ++ cat /tmp/tmp.m44Jq2utAI ++ rm /tmp/tmp.R2sv0O9Aie /tmp/tmp.m44Jq2utAI ++ return 0 + [[ 2 == \2 ]] + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 
-uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' -s /tmp/tmp.iIEmphhn2S/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-3.sql /tmp/tmp.iIEmphhn2S/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.sB3Y60rxQu ++ mktemp + local LAST_ERR=/tmp/tmp.bdl93AqvUJ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.sB3Y60rxQu secret/my-cluster-secrets patched + cat /tmp/tmp.bdl93AqvUJ + rm /tmp/tmp.sB3Y60rxQu /tmp/tmp.bdl93AqvUJ + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.RbyrbCDaI8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.xTB7vwAcWJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RbyrbCDaI8 ++ cat /tmp/tmp.xTB7vwAcWJ ++ rm /tmp/tmp.RbyrbCDaI8 /tmp/tmp.xTB7vwAcWJ ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + echo 'Skipping dual password feature doesn'\''t work for 5.7. PXC 5.7 doesn'\''t support it!' Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it! 
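Aside: the wait_cluster_consistency call that resumes below, like the ones earlier in this log, reduces to a bounded poll of the custom resource status. A minimal sketch of that pattern, assuming the same jsonpath fields the trace queries (the helper name wait_ready and the hard-coded timings here are illustrative, not the harness's actual implementation):

    wait_ready() {
        local cluster=$1 max=36 i=0
        # poll .status.state (as the trace does) until the operator reports "ready"
        while [[ "$(kubectl get pxc "$cluster" -o 'jsonpath={.status.state}')" != "ready" ]]; do
            [[ $i -ge $max ]] && { echo "cluster $cluster never became ready" >&2; return 1; }
            echo 'waiting for cluster readiness'
            sleep 20
            i=$((i + 1))
        done
        # once ready, the harness additionally compares {.status.pxc.ready} and,
        # depending on which proxy is enabled, {.status.proxysql.ready} or
        # {.status.haproxy.ready} against the expected cluster/proxy sizes
    }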
+ return + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gT0dMAZ20D +++ mktemp ++ local LAST_ERR=/tmp/tmp.hMNmzGTzdf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gT0dMAZ20D ++ cat /tmp/tmp.hMNmzGTzdf ++ rm /tmp/tmp.gT0dMAZ20D /tmp/tmp.hMNmzGTzdf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TXwqgEC7TI +++ mktemp ++ local LAST_ERR=/tmp/tmp.PDtJ8INxwo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TXwqgEC7TI ++ cat /tmp/tmp.PDtJ8INxwo ++ rm /tmp/tmp.TXwqgEC7TI /tmp/tmp.PDtJ8INxwo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.z01dBSiDEr +++ mktemp ++ local LAST_ERR=/tmp/tmp.JmIcBRekui ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.z01dBSiDEr ++ cat /tmp/tmp.JmIcBRekui ++ rm /tmp/tmp.z01dBSiDEr /tmp/tmp.JmIcBRekui ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ynyN3hTsOx +++ mktemp ++ local LAST_ERR=/tmp/tmp.oBUJdFS5kz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ynyN3hTsOx ++ cat /tmp/tmp.oBUJdFS5kz ++ rm /tmp/tmp.ynyN3hTsOx /tmp/tmp.oBUJdFS5kz ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.I2pNHVSv47 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.R9AKMcplpC +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.I2pNHVSv47 +++++ cat /tmp/tmp.R9AKMcplpC +++++ rm /tmp/tmp.I2pNHVSv47 /tmp/tmp.R9AKMcplpC +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.pmErwpXsjb ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.1dsiTiAiXY +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get 
pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.pmErwpXsjb +++++ cat /tmp/tmp.1dsiTiAiXY +++++ rm /tmp/tmp.pmErwpXsjb /tmp/tmp.1dsiTiAiXY +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qxkWJY0s0X +++ mktemp ++ local LAST_ERR=/tmp/tmp.hIqpukE96Z ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qxkWJY0s0X ++ cat /tmp/tmp.hIqpukE96Z ++ rm /tmp/tmp.qxkWJY0s0X /tmp/tmp.hIqpukE96Z ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mfpSzvAEew +++ mktemp ++ local LAST_ERR=/tmp/tmp.UZGrAF5Gnw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mfpSzvAEew ++ cat /tmp/tmp.UZGrAF5Gnw ++ rm /tmp/tmp.mfpSzvAEew /tmp/tmp.UZGrAF5Gnw ++ return 0 + client_pod=pxc-client-64b479df95-qtwzv + wait_pod pxc-client-64b479df95-qtwzv + local pod=pxc-client-64b479df95-qtwzv + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-qtwzv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-qtwzv condition met pxc-client-64b479df95-qtwzv.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.iIEmphhn2S/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-4.sql /tmp/tmp.iIEmphhn2S/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.qfzQQiCX3i ++ mktemp + local LAST_ERR=/tmp/tmp.76OTI7463A + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qfzQQiCX3i secret/my-cluster-secrets patched + cat /tmp/tmp.76OTI7463A + rm /tmp/tmp.qfzQQiCX3i /tmp/tmp.76OTI7463A + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.STRH2YN2Mt +++ mktemp ++ local LAST_ERR=/tmp/tmp.0ANUWcGH6D ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.STRH2YN2Mt ++ cat /tmp/tmp.0ANUWcGH6D ++ rm /tmp/tmp.STRH2YN2Mt /tmp/tmp.0ANUWcGH6D ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.q2hJLmzyg6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.6lhHcD4CsH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.q2hJLmzyg6 ++ cat /tmp/tmp.6lhHcD4CsH ++ rm /tmp/tmp.q2hJLmzyg6 /tmp/tmp.6lhHcD4CsH ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.rLSdUx5t7g ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.yQLhkfSnsP +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.rLSdUx5t7g +++++ cat /tmp/tmp.yQLhkfSnsP +++++ rm /tmp/tmp.rLSdUx5t7g /tmp/tmp.yQLhkfSnsP +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.MBWzrBrsjM ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.NtYyPlE6FD +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' 
+++++ break +++++ cat /tmp/tmp.MBWzrBrsjM +++++ cat /tmp/tmp.NtYyPlE6FD +++++ rm /tmp/tmp.MBWzrBrsjM /tmp/tmp.NtYyPlE6FD +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pcGDI8HWLR +++ mktemp ++ local LAST_ERR=/tmp/tmp.v3g78gCHnb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pcGDI8HWLR ++ cat /tmp/tmp.v3g78gCHnb ++ rm /tmp/tmp.pcGDI8HWLR /tmp/tmp.v3g78gCHnb ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.efuI94xtpz +++ mktemp ++ local LAST_ERR=/tmp/tmp.M9g5HiNlKP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.efuI94xtpz ++ cat /tmp/tmp.M9g5HiNlKP ++ rm /tmp/tmp.efuI94xtpz /tmp/tmp.M9g5HiNlKP ++ return 0 + client_pod=pxc-client-64b479df95-qtwzv + wait_pod pxc-client-64b479df95-qtwzv + local pod=pxc-client-64b479df95-qtwzv + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-qtwzv ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-64b479df95-qtwzv condition met pxc-client-64b479df95-qtwzv.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.iIEmphhn2S/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-4.sql /tmp/tmp.iIEmphhn2S/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.K1x6fAEfgB ++ mktemp + local LAST_ERR=/tmp/tmp.NVLkHiOgfE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.K1x6fAEfgB perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.NVLkHiOgfE + rm /tmp/tmp.K1x6fAEfgB /tmp/tmp.NVLkHiOgfE + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.G9kwWOtT1P +++ mktemp ++ local LAST_ERR=/tmp/tmp.X4kFuSOKTU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.G9kwWOtT1P ++ cat /tmp/tmp.X4kFuSOKTU ++ rm /tmp/tmp.G9kwWOtT1P /tmp/tmp.X4kFuSOKTU ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.za7uYdbkNq +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ry5RQzpicl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.za7uYdbkNq ++ cat /tmp/tmp.Ry5RQzpicl ++ rm /tmp/tmp.za7uYdbkNq /tmp/tmp.Ry5RQzpicl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Dfwzpco7ci +++ mktemp ++ local LAST_ERR=/tmp/tmp.MviutLDhsr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Dfwzpco7ci ++ cat /tmp/tmp.MviutLDhsr ++ rm /tmp/tmp.Dfwzpco7ci /tmp/tmp.MviutLDhsr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.shYmygniiw +++ mktemp ++ local LAST_ERR=/tmp/tmp.1p7E6ZkCHU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break 
++ cat /tmp/tmp.shYmygniiw ++ cat /tmp/tmp.1p7E6ZkCHU ++ rm /tmp/tmp.shYmygniiw /tmp/tmp.1p7E6ZkCHU ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LLZMXjB3OI +++ mktemp ++ local LAST_ERR=/tmp/tmp.FtxOvXlcT3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LLZMXjB3OI ++ cat /tmp/tmp.FtxOvXlcT3 ++ rm /tmp/tmp.LLZMXjB3OI /tmp/tmp.FtxOvXlcT3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xzsm6pxkXz +++ mktemp ++ local LAST_ERR=/tmp/tmp.yLMdiLerNO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xzsm6pxkXz ++ cat /tmp/tmp.yLMdiLerNO ++ rm /tmp/tmp.xzsm6pxkXz /tmp/tmp.yLMdiLerNO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YB3dZc8M9b +++ mktemp ++ local LAST_ERR=/tmp/tmp.WKVnn6ZYzJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YB3dZc8M9b ++ cat /tmp/tmp.WKVnn6ZYzJ ++ rm /tmp/tmp.YB3dZc8M9b /tmp/tmp.WKVnn6ZYzJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IJJ6Z7XpYs +++ mktemp ++ local LAST_ERR=/tmp/tmp.Im9JV8uFpf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IJJ6Z7XpYs ++ cat /tmp/tmp.Im9JV8uFpf ++ rm /tmp/tmp.IJJ6Z7XpYs /tmp/tmp.Im9JV8uFpf ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dhtMrOI6tw +++ mktemp ++ local LAST_ERR=/tmp/tmp.rDcNir8gKQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dhtMrOI6tw ++ cat /tmp/tmp.rDcNir8gKQ ++ rm /tmp/tmp.dhtMrOI6tw /tmp/tmp.rDcNir8gKQ ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.VSvFlxOaGN ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.0XfCXDXTzM +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break 
+++++ cat /tmp/tmp.VSvFlxOaGN +++++ cat /tmp/tmp.0XfCXDXTzM +++++ rm /tmp/tmp.VSvFlxOaGN /tmp/tmp.0XfCXDXTzM +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.9ovTlht6SI ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.31RsQccSpl +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.9ovTlht6SI +++++ cat /tmp/tmp.31RsQccSpl +++++ rm /tmp/tmp.9ovTlht6SI /tmp/tmp.31RsQccSpl +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.t8KSUMGGm4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.b4c3JQoKYS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.t8KSUMGGm4 ++ cat /tmp/tmp.b4c3JQoKYS ++ rm /tmp/tmp.t8KSUMGGm4 /tmp/tmp.b4c3JQoKYS ++ return 0 + [[ 2 == \2 ]] + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.aJs8EyI8dA ++ mktemp + local LAST_ERR=/tmp/tmp.ICh1jUGazo + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.aJs8EyI8dA secret/my-cluster-secrets-2 patched + cat /tmp/tmp.ICh1jUGazo + rm /tmp/tmp.aJs8EyI8dA /tmp/tmp.ICh1jUGazo + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xNE7vadz0o +++ mktemp ++ local LAST_ERR=/tmp/tmp.o6ODaFRidw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xNE7vadz0o ++ cat /tmp/tmp.o6ODaFRidw ++ rm /tmp/tmp.xNE7vadz0o /tmp/tmp.o6ODaFRidw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qJg2ILnXbv +++ mktemp ++ local LAST_ERR=/tmp/tmp.9PnSThdJws ++ local exit_status=0 +++ seq 
0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qJg2ILnXbv ++ cat /tmp/tmp.9PnSThdJws ++ rm /tmp/tmp.qJg2ILnXbv /tmp/tmp.9PnSThdJws ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HQHnF61y1j +++ mktemp ++ local LAST_ERR=/tmp/tmp.AhqmODIBvU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HQHnF61y1j ++ cat /tmp/tmp.AhqmODIBvU ++ rm /tmp/tmp.HQHnF61y1j /tmp/tmp.AhqmODIBvU ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.HsbNZwyCdD ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.dmrfeSfqds +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.HsbNZwyCdD +++++ cat /tmp/tmp.dmrfeSfqds +++++ rm /tmp/tmp.HsbNZwyCdD /tmp/tmp.dmrfeSfqds +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.qwEqlLxJP0 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ODpaPqoPDp +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.qwEqlLxJP0 +++++ cat /tmp/tmp.ODpaPqoPDp +++++ rm /tmp/tmp.qwEqlLxJP0 /tmp/tmp.ODpaPqoPDp +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tdwfyujCfs +++ mktemp ++ local LAST_ERR=/tmp/tmp.mThDzNz74O ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tdwfyujCfs ++ cat /tmp/tmp.mThDzNz74O ++ rm /tmp/tmp.tdwfyujCfs /tmp/tmp.mThDzNz74O ++ return 0 + [[ 2 == \2 ]] + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2mb8bBi692 +++ mktemp ++ local LAST_ERR=/tmp/tmp.4Tf03Q8oyy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods 
--selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2mb8bBi692 ++ cat /tmp/tmp.4Tf03Q8oyy ++ rm /tmp/tmp.2mb8bBi692 /tmp/tmp.4Tf03Q8oyy ++ return 0 + client_pod=pxc-client-64b479df95-qtwzv + wait_pod pxc-client-64b479df95-qtwzv + local pod=pxc-client-64b479df95-qtwzv + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-qtwzv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-qtwzv condition met pxc-client-64b479df95-qtwzv.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.iIEmphhn2S/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-4.sql /tmp/tmp.iIEmphhn2S/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.PVHRHx94qD +++ mktemp ++ local LAST_ERR=/tmp/tmp.hI2PCvHOMO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PVHRHx94qD ++ cat /tmp/tmp.hI2PCvHOMO ++ rm /tmp/tmp.PVHRHx94qD /tmp/tmp.hI2PCvHOMO ++ return 0 + newpass=y0FaUl_OY=lXDj0H~U + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''y0FaUl_OY=lXDj0H~U'\'';' '-h some-name-pxc -uroot -p'\''y0FaUl_OY=lXDj0H~U'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''y0FaUl_OY=lXDj0H~U'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''y0FaUl_OY=lXDj0H~U'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BQFYmXpjqj +++ mktemp ++ local LAST_ERR=/tmp/tmp.VaofRdjUb7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BQFYmXpjqj ++ cat /tmp/tmp.VaofRdjUb7 ++ rm /tmp/tmp.BQFYmXpjqj /tmp/tmp.VaofRdjUb7 ++ return 0 + client_pod=pxc-client-64b479df95-qtwzv + wait_pod pxc-client-64b479df95-qtwzv + local pod=pxc-client-64b479df95-qtwzv + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-qtwzv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-qtwzv condition met pxc-client-64b479df95-qtwzv.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''y0FaUl_OY=lXDj0H~U'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''y0FaUl_OY=lXDj0H~U'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''y0FaUl_OY=lXDj0H~U'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync 
-p'\''y0FaUl_OY=lXDj0H~U'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mhURZXPnrb +++ mktemp ++ local LAST_ERR=/tmp/tmp.sJYzJwAmzX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mhURZXPnrb ++ cat /tmp/tmp.sJYzJwAmzX ++ rm /tmp/tmp.mhURZXPnrb /tmp/tmp.sJYzJwAmzX ++ return 0 + client_pod=pxc-client-64b479df95-qtwzv + wait_pod pxc-client-64b479df95-qtwzv + local pod=pxc-client-64b479df95-qtwzv + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-qtwzv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-qtwzv condition met pxc-client-64b479df95-qtwzv.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.iIEmphhn2S/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-4.sql /tmp/tmp.iIEmphhn2S/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.r4lhKX6fAd +++ mktemp ++ local LAST_ERR=/tmp/tmp.cCQZ64f1d6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.r4lhKX6fAd ++ cat /tmp/tmp.cCQZ64f1d6 ++ rm /tmp/tmp.r4lhKX6fAd /tmp/tmp.cCQZ64f1d6 ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.JizfTq6e8t ++ mktemp + local LAST_ERR=/tmp/tmp.UK8ymmjG75 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.JizfTq6e8t secret/my-cluster-secrets-2 configured + cat /tmp/tmp.UK8ymmjG75 Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
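Aside: the warning above is benign here. my-cluster-secrets-2 was originally created without kubectl apply, so it lacks the kubectl.kubernetes.io/last-applied-configuration annotation, and kubectl patches it in automatically on this first apply. As a side note (not something this harness does), creating such fixtures declaratively avoids the warning entirely:

    # either create with --save-config so the annotation is recorded up front...
    kubectl create --save-config -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/conf/secrets.yml
    # ...or use apply from the start, which sets the annotation when it creates the object
    kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/conf/secrets.yml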
+ rm /tmp/tmp.JizfTq6e8t /tmp/tmp.UK8ymmjG75 + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kfwgtCOVEw +++ mktemp ++ local LAST_ERR=/tmp/tmp.g56XvVkuTv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kfwgtCOVEw ++ cat /tmp/tmp.g56XvVkuTv ++ rm /tmp/tmp.kfwgtCOVEw /tmp/tmp.g56XvVkuTv ++ return 0 + client_pod=pxc-client-64b479df95-qtwzv + wait_pod pxc-client-64b479df95-qtwzv + local pod=pxc-client-64b479df95-qtwzv + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-qtwzv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-qtwzv condition met pxc-client-64b479df95-qtwzv.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.iIEmphhn2S/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-4.sql /tmp/tmp.iIEmphhn2S/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_OUT=/tmp/tmp.t6UP4RVps0 + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1740-0a840b68#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' ++ mktemp + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-18704~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_ERR=/tmp/tmp.0pSAWlFrkt + local exit_status=0 + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' ++ seq 0 2 + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.t6UP4RVps0 
perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.0pSAWlFrkt + rm /tmp/tmp.t6UP4RVps0 /tmp/tmp.0pSAWlFrkt + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.n0F75tkF46 +++ mktemp ++ local LAST_ERR=/tmp/tmp.tTTLr9c1P4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.n0F75tkF46 ++ cat /tmp/tmp.tTTLr9c1P4 ++ rm /tmp/tmp.n0F75tkF46 /tmp/tmp.tTTLr9c1P4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.g8e2erOTSq +++ mktemp ++ local LAST_ERR=/tmp/tmp.1wg6UlF9BY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.g8e2erOTSq ++ cat /tmp/tmp.1wg6UlF9BY ++ rm /tmp/tmp.g8e2erOTSq /tmp/tmp.1wg6UlF9BY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rQnUjFz5OE +++ mktemp ++ local LAST_ERR=/tmp/tmp.yI5xw42J4R ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rQnUjFz5OE ++ cat /tmp/tmp.yI5xw42J4R ++ rm /tmp/tmp.rQnUjFz5OE /tmp/tmp.yI5xw42J4R ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ckrnUAS8o1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.CVURJR5yJT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ckrnUAS8o1 ++ cat /tmp/tmp.CVURJR5yJT ++ rm /tmp/tmp.ckrnUAS8o1 /tmp/tmp.CVURJR5yJT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CqQuKSzLtp +++ mktemp ++ local LAST_ERR=/tmp/tmp.IpC8737EEv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CqQuKSzLtp ++ cat /tmp/tmp.IpC8737EEv ++ rm /tmp/tmp.CqQuKSzLtp /tmp/tmp.IpC8737EEv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 
]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7uUHgU1Xwj +++ mktemp ++ local LAST_ERR=/tmp/tmp.T9i6RPwp7N ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7uUHgU1Xwj ++ cat /tmp/tmp.T9i6RPwp7N ++ rm /tmp/tmp.7uUHgU1Xwj /tmp/tmp.T9i6RPwp7N ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VHFuRJbgOO +++ mktemp ++ local LAST_ERR=/tmp/tmp.RZXXggp1Eh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VHFuRJbgOO ++ cat /tmp/tmp.RZXXggp1Eh ++ rm /tmp/tmp.VHFuRJbgOO /tmp/tmp.RZXXggp1Eh ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yGymC6SCwt +++ mktemp ++ local LAST_ERR=/tmp/tmp.8rCSDY2RbT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yGymC6SCwt ++ cat /tmp/tmp.8rCSDY2RbT ++ rm /tmp/tmp.yGymC6SCwt /tmp/tmp.8rCSDY2RbT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZeQbCHz6e6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.8cJqfX8p3m ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZeQbCHz6e6 ++ cat /tmp/tmp.8cJqfX8p3m ++ rm /tmp/tmp.ZeQbCHz6e6 /tmp/tmp.8cJqfX8p3m ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 8 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0FfyF2G3Yi +++ mktemp ++ local LAST_ERR=/tmp/tmp.qwRa38OwML ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0FfyF2G3Yi ++ cat /tmp/tmp.qwRa38OwML ++ rm /tmp/tmp.0FfyF2G3Yi /tmp/tmp.qwRa38OwML ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 9 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.L2sWcPsDo3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.cEMqd9btVj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.L2sWcPsDo3 ++ cat /tmp/tmp.cEMqd9btVj ++ rm /tmp/tmp.L2sWcPsDo3 /tmp/tmp.cEMqd9btVj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' 
waiting for cluster readyness + sleep 20 + [[ 10 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wGONSHujWU +++ mktemp ++ local LAST_ERR=/tmp/tmp.qhNQY0L1zp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wGONSHujWU ++ cat /tmp/tmp.qhNQY0L1zp ++ rm /tmp/tmp.wGONSHujWU /tmp/tmp.qhNQY0L1zp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 11 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9zBsO4ezTx +++ mktemp ++ local LAST_ERR=/tmp/tmp.vK9HAMynTp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9zBsO4ezTx ++ cat /tmp/tmp.vK9HAMynTp ++ rm /tmp/tmp.9zBsO4ezTx /tmp/tmp.vK9HAMynTp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 12 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w1AtZ98Vrp +++ mktemp ++ local LAST_ERR=/tmp/tmp.l4eW6ufF0G ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.w1AtZ98Vrp ++ cat /tmp/tmp.l4eW6ufF0G ++ rm /tmp/tmp.w1AtZ98Vrp /tmp/tmp.l4eW6ufF0G ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 13 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sieVQ34Sxa +++ mktemp ++ local LAST_ERR=/tmp/tmp.uvIlbB9eIy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sieVQ34Sxa ++ cat /tmp/tmp.uvIlbB9eIy ++ rm /tmp/tmp.sieVQ34Sxa /tmp/tmp.uvIlbB9eIy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 14 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vRkGoSfCdd +++ mktemp ++ local LAST_ERR=/tmp/tmp.OO3nPRFURX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vRkGoSfCdd ++ cat /tmp/tmp.OO3nPRFURX ++ rm /tmp/tmp.vRkGoSfCdd /tmp/tmp.OO3nPRFURX ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e0h4qx3VTS +++ mktemp ++ local LAST_ERR=/tmp/tmp.F3xnwKWeXN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.e0h4qx3VTS ++ cat /tmp/tmp.F3xnwKWeXN ++ rm /tmp/tmp.e0h4qx3VTS /tmp/tmp.F3xnwKWeXN ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local 
target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.en5LdDIsvS ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.CiZ2DAG68r +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.en5LdDIsvS +++++ cat /tmp/tmp.CiZ2DAG68r +++++ rm /tmp/tmp.en5LdDIsvS /tmp/tmp.CiZ2DAG68r +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LyKkBKIpWa +++ mktemp ++ local LAST_ERR=/tmp/tmp.zty9eHg8gT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LyKkBKIpWa ++ cat /tmp/tmp.zty9eHg8gT ++ rm /tmp/tmp.LyKkBKIpWa /tmp/tmp.zty9eHg8gT ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.bEoHKrY8JC ++ mktemp + local LAST_ERR=/tmp/tmp.MdBa5ZpwCI + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.bEoHKrY8JC secret/my-cluster-secrets patched + cat /tmp/tmp.MdBa5ZpwCI + rm /tmp/tmp.bEoHKrY8JC /tmp/tmp.MdBa5ZpwCI + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vfFJg9LV3Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.zp9jHta2Bk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vfFJg9LV3Y ++ cat /tmp/tmp.zp9jHta2Bk ++ rm /tmp/tmp.vfFJg9LV3Y /tmp/tmp.zp9jHta2Bk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JPJbNesqhS +++ mktemp ++ local LAST_ERR=/tmp/tmp.0W8rKYSVP1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JPJbNesqhS ++ cat /tmp/tmp.0W8rKYSVP1 ++ rm /tmp/tmp.JPJbNesqhS /tmp/tmp.0W8rKYSVP1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ 
mktemp ++ local LAST_OUT=/tmp/tmp.tsI1qbBj6E +++ mktemp ++ local LAST_ERR=/tmp/tmp.jh72o4rNmr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tsI1qbBj6E ++ cat /tmp/tmp.jh72o4rNmr ++ rm /tmp/tmp.tsI1qbBj6E /tmp/tmp.jh72o4rNmr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.n6HiTIwsSF +++ mktemp ++ local LAST_ERR=/tmp/tmp.T8Lj7fTQUy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.n6HiTIwsSF ++ cat /tmp/tmp.T8Lj7fTQUy ++ rm /tmp/tmp.n6HiTIwsSF /tmp/tmp.T8Lj7fTQUy ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.l7MmxLxMAk +++ mktemp ++ local LAST_ERR=/tmp/tmp.KWLhne7fmi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.l7MmxLxMAk ++ cat /tmp/tmp.KWLhne7fmi ++ rm /tmp/tmp.l7MmxLxMAk /tmp/tmp.KWLhne7fmi ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.FHRfC8qruG ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.P2Ety9uqYo +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.FHRfC8qruG +++++ cat /tmp/tmp.P2Ety9uqYo +++++ rm /tmp/tmp.FHRfC8qruG /tmp/tmp.P2Ety9uqYo +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MuyLmP93C6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.SkUFx7UA0k ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MuyLmP93C6 ++ cat /tmp/tmp.SkUFx7UA0k ++ rm /tmp/tmp.MuyLmP93C6 /tmp/tmp.SkUFx7UA0k ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local 
LAST_OUT=/tmp/tmp.vNTOWDa8L7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.M6T8ABEkwP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vNTOWDa8L7 ++ cat /tmp/tmp.M6T8ABEkwP ++ rm /tmp/tmp.vNTOWDa8L7 /tmp/tmp.M6T8ABEkwP ++ return 0 + client_pod=pxc-client-64b479df95-qtwzv + wait_pod pxc-client-64b479df95-qtwzv + local pod=pxc-client-64b479df95-qtwzv + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-qtwzv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-qtwzv condition met pxc-client-64b479df95-qtwzv.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.iIEmphhn2S/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1740/e2e-tests/users/compare/select-3.sql /tmp/tmp.iIEmphhn2S/select-3.sql + destroy users-18704 + local namespace=users-18704 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' + grep -v 'get backup status: Job.batch' + sort -u ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator + tee /tmp/tmp.iIEmphhn2S/operator.log ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.5AC6GeY9zg +++ mktemp ++ local LAST_ERR=/tmp/tmp.DT5cO4b9Vo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5AC6GeY9zg ++ cat /tmp/tmp.DT5cO4b9Vo ++ rm /tmp/tmp.5AC6GeY9zg /tmp/tmp.DT5cO4b9Vo ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-6849457d9-kkwd7 ++ mktemp + local LAST_OUT=/tmp/tmp.YPYqvj50Bp ++ mktemp + local LAST_ERR=/tmp/tmp.1zp2zkYhKu + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-6849457d9-kkwd7 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.YPYqvj50Bp + cat /tmp/tmp.1zp2zkYhKu + rm /tmp/tmp.YPYqvj50Bp /tmp/tmp.1zp2zkYhKu + return 0 2024-06-28T09:11:37.582Z INFO setup Manager starting up {"gitCommit": "0a840b68490b2f8881fb749474303f7fb8a1239d", "gitBranch": "PR-1740-0a840b68", "buildTime": "2024-06-28T06:58:02Z", "goVersion": "go1.22.4", "os": "linux", "arch": "amd64"} 2024-06-28T09:11:37.582Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1436000"} 2024-06-28T09:11:37.583Z INFO setup Registering Components. 2024-06-28T09:11:39.650Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2024-06-28T09:11:39.654Z INFO setup Starting the Cmd. 
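
Note on the trace above: every kubectl invocation in this run goes through the same retry idiom — stdout and stderr are captured to mktemp files, the command is attempted up to three times (seq 0 2), the captured output is replayed, and the temp files are removed. The helper's definition is not included in this log; the following is a minimal sketch reconstructed from the trace (the name kubectl_bin and the LAST_OUT/LAST_ERR variables come from the trace itself; the retry interval is an assumption, and the real helper in the suite's functions file may differ):

    # Reconstructed sketch of the retry wrapper exercised throughout this trace.
    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0 i
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" != 0 ]; then
                sleep 1   # pause before the next attempt (interval assumed)
            else
                break
            fi
        done
        cat "$LAST_OUT"             # replay captured stdout
        cat "$LAST_ERR"             # replay captured stderr
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }
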
2024-06-28T09:11:39.655Z INFO controller-runtime.certwatcher Updated current TLS certificate 2024-06-28T09:11:39.655Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2024-06-28T09:11:39.655Z INFO controller-runtime.metrics Starting metrics server 2024-06-28T09:11:39.655Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2024-06-28T09:11:39.655Z INFO controller-runtime.webhook Starting webhook server 2024-06-28T09:11:39.655Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2024-06-28T09:11:39.656Z INFO controller-runtime.certwatcher Starting certificate watcher 2024-06-28T09:11:39.756Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 2024-06-28T09:11:39.782Z DEBUG events percona-xtradb-cluster-operator-6849457d9-kkwd7_7fe1d2b8-62c8-44ae-8a0e-057bf9c3f75a became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"e286707c-735f-494f-8cec-ac6513e3ef2f","apiVersion":"coordination.k8s.io/v1","resourceVersion":"72841"}, "reason": "LeaderElection"} 2024-06-28T09:11:39.782Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2024-06-28T09:11:39.782Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2024-06-28T09:11:39.782Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2024-06-28T09:11:39.783Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2024-06-28T09:11:39.783Z INFO Starting Controller {"controller": "pxc-controller"} 2024-06-28T09:11:39.783Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2024-06-28T09:11:39.783Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2024-06-28T09:11:39.894Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2024-06-28T09:11:39.894Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2024-06-28T09:11:39.894Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2024-06-28T09:12:16.376Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "e9ab10d3-e958-422d-81f7-b11e936dad64", "version": "1.15.0"} 2024-06-28T09:13:31.642Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "6955405f-9d5e-4c2f-afa1-50abf21e70ce", "user": "operator"} 2024-06-28T09:13:31.693Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "6955405f-9d5e-4c2f-afa1-50abf21e70ce", "user": "monitor"} 2024-06-28T09:13:31.747Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "6955405f-9d5e-4c2f-afa1-50abf21e70ce"} 2024-06-28T09:13:31.801Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "6955405f-9d5e-4c2f-afa1-50abf21e70ce", "user": "xtrabackup"} 2024-06-28T09:13:31.875Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "6955405f-9d5e-4c2f-afa1-50abf21e70ce"} 2024-06-28T09:13:32.145Z INFO reconcile 
replication error {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "6955405f-9d5e-4c2f-afa1-50abf21e70ce", "err": "get primary pxc pod: not found"} 2024-06-28T09:13:36.753Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "fb10b73c-5a8a-488e-acee-b7b5c66b78aa", "err": "get primary pxc pod: not found"} 2024-06-28T09:13:41.945Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "3660be2e-a645-4558-ab2a-d36b1a0c220e", "err": "get primary pxc pod: not found"} 2024-06-28T09:13:47.149Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "671b7d1c-7c2e-49db-9894-ed0594797ffe", "err": "get primary pxc pod: not found"} 2024-06-28T09:15:56.648Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "ac1a9065-38e6-44a5-8137-7cf4e477d967", "user": "root"} 2024-06-28T09:15:56.693Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "ac1a9065-38e6-44a5-8137-7cf4e477d967", "user": "replication"} 2024-06-28T09:15:56.842Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "ac1a9065-38e6-44a5-8137-7cf4e477d967", "new version": "5.7.44-48-57"} 2024-06-28T09:16:00.234Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "ac1a9065-38e6-44a5-8137-7cf4e477d967"} 2024-06-28T09:16:05.110Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "0def9632-b73e-4db2-b626-6704454fe32e"} 2024-06-28T09:16:10.411Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "e7bb9d80-2f1f-4db4-b202-3145021367a5"} 2024-06-28T09:16:15.811Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "952b463d-499e-4f8e-a0a0-76b0893cfab9"} 2024-06-28T09:16:21.246Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "e889cfbc-1b60-4a53-9784-759853c8780b"} 2024-06-28T09:16:27.306Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "b8cfe366-8165-43ed-8306-5718df916648"} 2024-06-28T09:16:31.820Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "418e8353-ec72-4c1e-841a-05420d66678c"} 2024-06-28T09:16:37.103Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "6f8b797a-a84a-4318-8f72-c9c229fc69bc"} 2024-06-28T09:16:42.899Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "20bd0854-7139-4ea6-9c67-8583c8abe410"} 2024-06-28T09:16:47.716Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": 
"0586d1dc-e2eb-405f-8ba5-10cc9e53151e"} 2024-06-28T09:16:53.035Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "74616d5a-751b-476b-93cf-3fe621fe42b5"} 2024-06-28T09:16:59.221Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "03944bbc-59f0-4d21-8cc4-0b092a15b4a1"} 2024-06-28T09:17:04.914Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "d8a22c50-5cdb-4cee-8999-1c8ffc4aa99c"} 2024-06-28T09:17:06.834Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f3d86660-e399-4021-80ad-889b58b325c3", "user": "root"} 2024-06-28T09:17:06.877Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f3d86660-e399-4021-80ad-889b58b325c3", "user": "root"} 2024-06-28T09:17:06.892Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f3d86660-e399-4021-80ad-889b58b325c3", "secret": "some-name-mysql-init", "user": "root"} 2024-06-28T09:17:12.865Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f3d86660-e399-4021-80ad-889b58b325c3"} 2024-06-28T09:17:12.877Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f3d86660-e399-4021-80ad-889b58b325c3", "user": "root"} 2024-06-28T09:17:16.719Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f3d86660-e399-4021-80ad-889b58b325c3"} 2024-06-28T09:17:21.808Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "c0f82e4d-495e-4c7d-8aa7-0b4d08abe858"} 2024-06-28T09:17:27.541Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "0cf855ef-1a76-449f-87b0-8cf0e1f9898f"} 2024-06-28T09:17:49.283Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "97e8237d-833f-4c2b-a1f8-092102dd91c3", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-28T09:17:50.055Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a471b398-5321-4e33-84f6-017ab18628ea", "user": 
"proxyadmin"} 2024-06-28T09:17:50.055Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a471b398-5321-4e33-84f6-017ab18628ea", "user": "proxyadmin"} 2024-06-28T09:17:50.129Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a471b398-5321-4e33-84f6-017ab18628ea", "user": "proxyadmin"} 2024-06-28T09:17:50.141Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a471b398-5321-4e33-84f6-017ab18628ea", "user": "proxyadmin"} 2024-06-28T09:17:50.141Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a471b398-5321-4e33-84f6-017ab18628ea", "last-applied-secret": "1a55867d0f4fa0d0643653b2eb57b2f690315d8d843a10599c9839c2a779a95f"} 2024-06-28T09:17:50.414Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a471b398-5321-4e33-84f6-017ab18628ea", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-28T09:18:28.503Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "2b7594bf-cae1-4242-8a3d-65c186d9359c"} 2024-06-28T09:18:38.289Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "2a3b07b3-dd8f-469b-a657-20e3aa7be44e"} 2024-06-28T09:18:42.917Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "e60c9a60-cba0-4412-be9f-7744ff67e3ef", "error": "exec syncusers: unable to upgrade connection: pod does not exist / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: pod does not exist / / 
\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-28T09:18:43.555Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "9206a3b6-369e-4b52-ab4f-1b509970e410", "user": "xtrabackup"} 2024-06-28T09:18:43.581Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "9206a3b6-369e-4b52-ab4f-1b509970e410", "user": "xtrabackup"} 2024-06-28T09:18:43.592Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "9206a3b6-369e-4b52-ab4f-1b509970e410", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-28T09:18:43.603Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "9206a3b6-369e-4b52-ab4f-1b509970e410", "user": "xtrabackup"} 2024-06-28T09:18:43.603Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "9206a3b6-369e-4b52-ab4f-1b509970e410", "last-applied-secret": "d11bcc70ca53ea410cd455e13d82e7225917b0861d9b677ed34c02c5f6fbaf1b"} 2024-06-28T09:18:49.193Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "9206a3b6-369e-4b52-ab4f-1b509970e410"} 2024-06-28T09:20:26.645Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "6eaa4c1d-798a-4cec-8e25-8fbbdba64efe", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-18704 on 10.124.112.10:53: no such host"} 2024-06-28T09:20:32.102Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f1368105-4434-4ac2-9327-e29b6d3ce2e3", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-18704 on 10.124.112.10:53: no such host"} 2024-06-28T09:20:37.373Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "d8cbf3af-928c-43ef-8b31-b37f9b0fa16f", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-18704 on 10.124.112.10:53: no such host"} 2024-06-28T09:21:03.935Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "addb73fa-1287-4dd5-b596-18aa91ffc708", "primary name": "some-name-pxc-0.some-name-pxc.users-18704.svc.cluster.local"} 2024-06-28T09:21:14.355Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "46090e4b-dcea-47b7-aaff-9ea7641b3fcb", "primary name": "some-name-pxc-0.some-name-pxc.users-18704.svc.cluster.local"} 2024-06-28T09:21:23.527Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f998245a-03d6-430f-b85a-ee9276c891eb"} 2024-06-28T09:21:29.295Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "4106aea1-a5cc-48cf-9066-718b384d8757"} 2024-06-28T09:21:34.903Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "90314518-fd22-4e8c-8f2d-60e4c247bea5"} 2024-06-28T09:21:40.380Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "e014e4a8-b7c5-4816-bb7f-f85cd5050f0b"} 2024-06-28T09:21:42.025Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "834402c5-4279-46e2-8a8a-e4dfbcd32b7d", "user": "monitor"} 2024-06-28T09:21:42.075Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "834402c5-4279-46e2-8a8a-e4dfbcd32b7d", "user": "monitor"} 2024-06-28T09:21:42.085Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "834402c5-4279-46e2-8a8a-e4dfbcd32b7d", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-28T09:21:42.140Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "834402c5-4279-46e2-8a8a-e4dfbcd32b7d", "user": "monitor"} 2024-06-28T09:21:42.153Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "834402c5-4279-46e2-8a8a-e4dfbcd32b7d", "user": "monitor"} 2024-06-28T09:21:42.153Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "834402c5-4279-46e2-8a8a-e4dfbcd32b7d", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-06-28T09:21:45.437Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "834402c5-4279-46e2-8a8a-e4dfbcd32b7d", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-28T09:22:25.993Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "b389c127-4b14-42f7-83e5-9dd8f463bce4"} 2024-06-28T09:22:31.391Z DEBUG 
PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "228f52cc-3a2b-43d9-b147-51483ffd96a2"} 2024-06-28T09:22:36.475Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "ee27c716-4fb8-4058-8af8-5fcaa86a5e17"} 2024-06-28T09:22:45.919Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "567e69d9-5dfc-40b6-897d-096eb6582905", "user": "operator"} 2024-06-28T09:22:45.946Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "567e69d9-5dfc-40b6-897d-096eb6582905", "user": "operator"} 2024-06-28T09:22:45.964Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "567e69d9-5dfc-40b6-897d-096eb6582905", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-28T09:22:45.979Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "567e69d9-5dfc-40b6-897d-096eb6582905", "user": "operator"} 2024-06-28T09:22:45.979Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "567e69d9-5dfc-40b6-897d-096eb6582905", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-06-28T09:22:47.576Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "567e69d9-5dfc-40b6-897d-096eb6582905", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18704.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18704.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18704.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18704.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18704.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18704.svc.' 
(using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-28T09:23:08.111Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "48ef8757-0fa9-4578-8f1b-723f46126225"} 2024-06-28T09:23:14.609Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "0168e9a3-1e22-4a8a-9aad-8be37c60ef03"} 2024-06-28T09:23:18.910Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f546a904-5cca-46f0-a5f6-c052bf0e52d9"} 2024-06-28T09:23:31.480Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9", "secrets": "my-cluster-secrets-2"} 2024-06-28T09:23:31.480Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9", "user": "root"} 2024-06-28T09:23:31.524Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9", "user": "root"} 2024-06-28T09:23:31.537Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9", "secret": "some-name-mysql-init", "user": "root"} 2024-06-28T09:23:37.322Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9"} 2024-06-28T09:23:37.332Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9", "user": "root"} 2024-06-28T09:23:37.332Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9", "user": "operator"} 2024-06-28T09:23:37.363Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9", "user": "operator"} 2024-06-28T09:23:37.375Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-28T09:23:37.392Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9", "user": "operator"} 2024-06-28T09:23:37.392Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": 
"aacb3a9e-00b1-439e-946c-af3d4dd498a9", "user": "monitor"} 2024-06-28T09:23:37.418Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9", "user": "monitor"} 2024-06-28T09:23:37.431Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-28T09:23:37.481Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9", "user": "monitor"} 2024-06-28T09:23:37.496Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9", "user": "monitor"} 2024-06-28T09:23:37.496Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9", "user": "xtrabackup"} 2024-06-28T09:23:37.522Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9", "user": "xtrabackup"} 2024-06-28T09:23:37.533Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-28T09:23:37.549Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9", "user": "xtrabackup"} 2024-06-28T09:23:37.549Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9", "user": "replication"} 2024-06-28T09:23:37.576Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9", "user": "replication"} 2024-06-28T09:23:37.589Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9", "secret": "some-name-mysql-init", "user": "replication"} 2024-06-28T09:23:37.604Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9", "user": "replication"} 2024-06-28T09:23:37.604Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9", "user": "proxyadmin"} 2024-06-28T09:23:37.653Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9", "user": "proxyadmin"} 2024-06-28T09:23:37.668Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9", "user": "proxyadmin"} 2024-06-28T09:23:37.668Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": 
"aacb3a9e-00b1-439e-946c-af3d4dd498a9", "last-applied-secret": "9478f53645552d1f959474fbe85d3bde37898a7ce967e0240ee469c88db4f6ac"} 2024-06-28T09:23:37.668Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9", "last-applied-secret": "9478f53645552d1f959474fbe85d3bde37898a7ce967e0240ee469c88db4f6ac"} 2024-06-28T09:23:38.049Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "aacb3a9e-00b1-439e-946c-af3d4dd498a9", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-28T09:25:35.425Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "6467cab3-2a13-49a7-91b2-f947811b5597", "primary name": "some-name-pxc-0.some-name-pxc.users-18704.svc.cluster.local"} 2024-06-28T09:25:36.851Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "35780353-e37d-4c97-9440-e7a4b1d1a323", "primary name": "some-name-pxc-0.some-name-pxc.users-18704.svc.cluster.local"} 2024-06-28T09:25:37.163Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "e34bb7fb-dc17-445f-8e28-fe09e5a95b25", "primary name": "some-name-pxc-0.some-name-pxc.users-18704.svc.cluster.local"} 2024-06-28T09:25:42.149Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "c9ce2cf8-a89a-4606-aad9-29de738bb96e", "primary name": "some-name-pxc-0.some-name-pxc.users-18704.svc.cluster.local"} 2024-06-28T09:25:47.458Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "143491ff-e83f-4be0-b162-3be87d9daab9", "primary name": "some-name-pxc-0.some-name-pxc.users-18704.svc.cluster.local"} 2024-06-28T09:25:53.037Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "c27e45e1-12cc-4c25-a078-a8be4219ab1b", "primary name": "some-name-pxc-0.some-name-pxc.users-18704.svc.cluster.local"} 2024-06-28T09:25:58.314Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "9a5a72bc-979e-4cda-982e-fab4accc117f", "primary name": "some-name-pxc-0.some-name-pxc.users-18704.svc.cluster.local"} 2024-06-28T09:26:03.572Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a5f198a3-8281-48d9-81f6-07014dd5b6e3", "primary name": "some-name-pxc-0.some-name-pxc.users-18704.svc.cluster.local"} 2024-06-28T09:26:08.909Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "67a30eda-f8d1-4fc0-9804-965b6da30794", "primary name": "some-name-pxc-0.some-name-pxc.users-18704.svc.cluster.local"} 2024-06-28T09:26:20.688Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "189a2741-839c-4ed4-97e6-88a74ba359dc"} 2024-06-28T09:26:31.712Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "418ce7c2-a047-447e-bf8d-1f2a7ba5b1af"} 2024-06-28T09:26:36.720Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "cb103a03-467e-4025-a25a-892f8e9a9d9a", "user": "operator"} 2024-06-28T09:26:36.776Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "cb103a03-467e-4025-a25a-892f8e9a9d9a", "user": "operator"} 2024-06-28T09:26:36.792Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "cb103a03-467e-4025-a25a-892f8e9a9d9a", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-28T09:26:36.809Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "cb103a03-467e-4025-a25a-892f8e9a9d9a", "user": "operator"} 2024-06-28T09:26:36.810Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "cb103a03-467e-4025-a25a-892f8e9a9d9a", "last-applied-secret": "a5d9ff74f9a3a7abb35f590e425ff147390bdfb2878e61e47195a79533f09ad3"} 2024-06-28T09:26:39.722Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "cb103a03-467e-4025-a25a-892f8e9a9d9a", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18704.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18704.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18704.svc.' 
(using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18704.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18704.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18704.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-28T09:27:22.911Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "4feff4b2-20a2-4fa3-8a8b-eff805f785e4"} 2024-06-28T09:27:28.269Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "1b27b3a5-fe81-45e3-8699-c7bcaef0f3a7"} 2024-06-28T09:27:38.169Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "1113503d-d5bf-4387-97ca-508e6f54e7fc"} 2024-06-28T09:27:43.576Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "2731a57d-31c7-483e-ade6-253c231fd33f"} 2024-06-28T09:27:49.629Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "45e7648d-5fd7-4f7f-8fbd-66d1843530ec"} 2024-06-28T09:27:54.950Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "2cff4a9d-0356-4721-8265-2a2060ae38ba"} 2024-06-28T09:28:00.593Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "3c8517d6-ac90-42f5-8099-8478b7aa078a"} 2024-06-28T09:28:05.458Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "080de7b5-deda-43c8-ae70-395c78dbff48"} 2024-06-28T09:28:10.507Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "030adc6a-9972-48eb-8a65-94860f873bbc"} 2024-06-28T09:28:15.975Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "5df1061d-e107-4273-b01b-1014a7b430f7"} 2024-06-28T09:28:21.380Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "2ddfb8da-d078-4240-88f1-bd863557298c"} 2024-06-28T09:28:27.160Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": 
"06216e32-5e30-45d5-8554-9a2de5db49c8"} 2024-06-28T09:28:31.970Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "c7e497fb-bd91-4d7a-bf9e-914cf0129d02"} 2024-06-28T09:28:33.600Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f3081ad3-c55a-4f26-bd60-f2095f34ecaa", "user": "root"} 2024-06-28T09:28:33.645Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f3081ad3-c55a-4f26-bd60-f2095f34ecaa", "user": "root"} 2024-06-28T09:28:33.663Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f3081ad3-c55a-4f26-bd60-f2095f34ecaa", "secret": "some-name-mysql-init", "user": "root"} 2024-06-28T09:28:39.297Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f3081ad3-c55a-4f26-bd60-f2095f34ecaa"} 2024-06-28T09:28:39.307Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f3081ad3-c55a-4f26-bd60-f2095f34ecaa", "user": "root"} 2024-06-28T09:28:39.307Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f3081ad3-c55a-4f26-bd60-f2095f34ecaa", "user": "monitor"} 2024-06-28T09:28:39.337Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f3081ad3-c55a-4f26-bd60-f2095f34ecaa", "user": "monitor"} 2024-06-28T09:28:39.347Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f3081ad3-c55a-4f26-bd60-f2095f34ecaa", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-28T09:28:39.397Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f3081ad3-c55a-4f26-bd60-f2095f34ecaa", "user": "monitor"} 2024-06-28T09:28:39.412Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f3081ad3-c55a-4f26-bd60-f2095f34ecaa", "user": "monitor"} 2024-06-28T09:28:39.412Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f3081ad3-c55a-4f26-bd60-f2095f34ecaa", "user": "xtrabackup"} 2024-06-28T09:28:39.437Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f3081ad3-c55a-4f26-bd60-f2095f34ecaa", "user": "xtrabackup"} 2024-06-28T09:28:39.453Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f3081ad3-c55a-4f26-bd60-f2095f34ecaa", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-28T09:28:39.469Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f3081ad3-c55a-4f26-bd60-f2095f34ecaa", "user": "xtrabackup"} 2024-06-28T09:28:39.469Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f3081ad3-c55a-4f26-bd60-f2095f34ecaa", "user": "proxyadmin"} 2024-06-28T09:28:39.519Z INFO Proxy user 
updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f3081ad3-c55a-4f26-bd60-f2095f34ecaa", "user": "proxyadmin"} 2024-06-28T09:28:39.550Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f3081ad3-c55a-4f26-bd60-f2095f34ecaa", "user": "proxyadmin"} 2024-06-28T09:28:39.550Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f3081ad3-c55a-4f26-bd60-f2095f34ecaa", "last-applied-secret": "ce78cb551a6e07048b28354f20b936d6d6070529a7a0624ccdd0bbaa9350eccd"} 2024-06-28T09:28:39.550Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f3081ad3-c55a-4f26-bd60-f2095f34ecaa", "last-applied-secret": "ce78cb551a6e07048b28354f20b936d6d6070529a7a0624ccdd0bbaa9350eccd"} 2024-06-28T09:28:39.829Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "f3081ad3-c55a-4f26-bd60-f2095f34ecaa", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-28T09:28:56.727Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: 0039f735-3e74-4f9f-9f56-ac0196c2110f 2024-06-28T09:32:02.942Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a20e9333-85fe-4472-bb1d-259cdb104b30", "user": "root"} 2024-06-28T09:32:02.989Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a20e9333-85fe-4472-bb1d-259cdb104b30", "user": "root"} 2024-06-28T09:32:03.001Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a20e9333-85fe-4472-bb1d-259cdb104b30", "secret": "some-name-mysql-init", "user": "root"} 2024-06-28T09:32:03.012Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a20e9333-85fe-4472-bb1d-259cdb104b30", "user": "root"} 2024-06-28T09:32:03.012Z INFO Password 
changed, updating user {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a20e9333-85fe-4472-bb1d-259cdb104b30", "user": "operator"} 2024-06-28T09:32:03.038Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a20e9333-85fe-4472-bb1d-259cdb104b30", "user": "operator"} 2024-06-28T09:32:03.055Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a20e9333-85fe-4472-bb1d-259cdb104b30", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-28T09:32:03.068Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a20e9333-85fe-4472-bb1d-259cdb104b30", "user": "operator"} 2024-06-28T09:32:03.069Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a20e9333-85fe-4472-bb1d-259cdb104b30", "user": "monitor"} 2024-06-28T09:32:03.094Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a20e9333-85fe-4472-bb1d-259cdb104b30", "user": "monitor"} 2024-06-28T09:32:03.107Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a20e9333-85fe-4472-bb1d-259cdb104b30", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-28T09:32:03.119Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a20e9333-85fe-4472-bb1d-259cdb104b30", "user": "monitor"} 2024-06-28T09:32:03.119Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a20e9333-85fe-4472-bb1d-259cdb104b30", "user": "xtrabackup"} 2024-06-28T09:32:03.145Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a20e9333-85fe-4472-bb1d-259cdb104b30", "user": "xtrabackup"} 2024-06-28T09:32:03.160Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a20e9333-85fe-4472-bb1d-259cdb104b30", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-28T09:32:03.175Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a20e9333-85fe-4472-bb1d-259cdb104b30", "user": "xtrabackup"} 2024-06-28T09:32:03.175Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a20e9333-85fe-4472-bb1d-259cdb104b30", "user": "replication"} 2024-06-28T09:32:03.201Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a20e9333-85fe-4472-bb1d-259cdb104b30", "user": "replication"} 2024-06-28T09:32:03.214Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a20e9333-85fe-4472-bb1d-259cdb104b30", "secret": "some-name-mysql-init", "user": "replication"} 2024-06-28T09:32:03.238Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a20e9333-85fe-4472-bb1d-259cdb104b30", "user": "replication"} 2024-06-28T09:32:03.239Z INFO 
Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a20e9333-85fe-4472-bb1d-259cdb104b30", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-06-28T09:32:03.239Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "a20e9333-85fe-4472-bb1d-259cdb104b30", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-06-28T09:34:38.616Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "579cc79d-f2ce-4822-a505-82c4f653100e", "user": "monitor"} 2024-06-28T09:34:38.672Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "579cc79d-f2ce-4822-a505-82c4f653100e", "user": "monitor"} 2024-06-28T09:34:38.686Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "579cc79d-f2ce-4822-a505-82c4f653100e", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-28T09:34:38.702Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "579cc79d-f2ce-4822-a505-82c4f653100e", "user": "monitor"} 2024-06-28T09:34:38.702Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18704", "name": "some-name", "reconcileID": "579cc79d-f2ce-4822-a505-82c4f653100e", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:222 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:261 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:324 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1248 [mysql] 2024/06/28 09:31:31 connection.go:49: unexpected EOF [mysql] 2024/06/28 09:34:00 connection.go:49: unexpected EOF sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2 + grep -v NAMESPACE + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-18704 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.A1C17OMcmT ++ mktemp + local LAST_ERR=/tmp/tmp.HyuHQaYe1A + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.A1C17OMcmT perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.HyuHQaYe1A + rm /tmp/tmp.A1C17OMcmT /tmp/tmp.HyuHQaYe1A + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.uLnPLnIIUq ++ mktemp + local LAST_ERR=/tmp/tmp.G1uUeP6RRW + local 
exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.uLnPLnIIUq No resources found + cat /tmp/tmp.G1uUeP6RRW + rm /tmp/tmp.uLnPLnIIUq /tmp/tmp.G1uUeP6RRW + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.nEwKDW7Brh ++ mktemp + local LAST_ERR=/tmp/tmp.UUrdfzp1wk + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.nEwKDW7Brh No resources found + cat /tmp/tmp.UUrdfzp1wk + rm /tmp/tmp.nEwKDW7Brh /tmp/tmp.UUrdfzp1wk + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.O491s7UARf ++ mktemp + local LAST_ERR=/tmp/tmp.lXmx4p107v + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.O491s7UARf validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.lXmx4p107v + rm /tmp/tmp.O491s7UARf /tmp/tmp.lXmx4p107v + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-18704 + rm -rf /tmp/tmp.iIEmphhn2S + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + desc 'test passed' + set +o xtrace + local LAST_OUT=/tmp/tmp.jNwewa3ucw ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.VXwyvPKSCB ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.ObRzXKnmWl + local exit_status=0 + local LAST_ERR=/tmp/tmp.l0xuGGgo5v + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace users-18704
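
Taken together, this run is the tail of the users suite's password-rotation check: patch the monitor key in my-cluster-secrets, wait for the custom resource to report ready again, verify the new credentials through HAProxy, then tear everything down. A condensed sketch of that flow, assuming kubectl and a mysql client on PATH (the suite actually runs the query from a pxc-client pod and retries every kubectl call; both are elided here for brevity):

    # Rotate the monitor password and verify it, as this test run does.
    value=$(echo -n 'test-password2' | base64)   # dGVzdC1wYXNzd29yZDI=
    kubectl patch secret my-cluster-secrets -p="{\"data\":{\"monitor\": \"$value\"}}"
    sleep 15
    i=0
    until [ "$(kubectl get pxc some-name -o jsonpath='{.status.state}')" = "ready" ]; do
        echo 'waiting for cluster readiness'
        sleep 20
        i=$((i + 1))
        if [ "$i" -ge 36 ]; then   # same cap as max=36 in the trace (~12 minutes)
            echo 'cluster never became ready' >&2
            exit 1
        fi
    done
    mysql -h some-name-haproxy -umonitor -p'test-password2' -e 'SHOW DATABASES;'
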