Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/logs/users-5-7.log Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 + create_infra users-18989 + local ns=users-18989 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-26707 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.i5amX2NHwo ++ mktemp + local LAST_ERR=/tmp/tmp.DxDh5hnqXK + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.i5amX2NHwo perconaxtradbcluster.pxc.percona.com "some-name" deleted from users-26707 namespace + cat /tmp/tmp.DxDh5hnqXK + rm /tmp/tmp.i5amX2NHwo /tmp/tmp.DxDh5hnqXK + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.gMNacBlsms ++ mktemp + local LAST_ERR=/tmp/tmp.SK6JM41sHw + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.gMNacBlsms No resources found + cat /tmp/tmp.SK6JM41sHw + rm /tmp/tmp.gMNacBlsms /tmp/tmp.SK6JM41sHw + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.RifP5d7zak ++ mktemp + local LAST_ERR=/tmp/tmp.OTjTimPB2D + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RifP5d7zak No resources found + cat /tmp/tmp.OTjTimPB2D + rm /tmp/tmp.RifP5d7zak /tmp/tmp.OTjTimPB2D + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ sed s/NAMESPACE// ++ awk '-F ' '{print $2}' ++ tail -n1 + local chaos_mesh_ns= + '[' -n '' ']' ++ grep chaos-mesh ++ kubectl get MutatingWebhookConfiguration ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrole error: resource(s) were 
provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator ++ mktemp + egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' ++ mktemp + local LAST_OUT=/tmp/tmp.NUXZjk1I28 + local LAST_OUT=/tmp/tmp.uEzYY6wYhr ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.xt7aLWtTP0 + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.Oq0SXQpZbe + local exit_status=0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.uEzYY6wYhr + cat /tmp/tmp.Oq0SXQpZbe + rm /tmp/tmp.uEzYY6wYhr /tmp/tmp.Oq0SXQpZbe + return 0 namespace "users-26707" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.NUXZjk1I28 namespace "pxc-operator" deleted + cat /tmp/tmp.xt7aLWtTP0 + rm /tmp/tmp.NUXZjk1I28 /tmp/tmp.xt7aLWtTP0 + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'waiting for namespace/pxc-operator to be deleted' waiting for namespace/pxc-operator to be deleted+ set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.z2Aytht9r8 ++ mktemp + local LAST_ERR=/tmp/tmp.LQLv2JEqk6 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.z2Aytht9r8 namespace/pxc-operator created + cat /tmp/tmp.LQLv2JEqk6 + rm /tmp/tmp.z2Aytht9r8 /tmp/tmp.LQLv2JEqk6 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.sVOXjrMN9o +++ mktemp ++ local LAST_ERR=/tmp/tmp.Xh3rD0hR0f ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sVOXjrMN9o ++ cat /tmp/tmp.Xh3rD0hR0f ++ rm /tmp/tmp.sVOXjrMN9o /tmp/tmp.Xh3rD0hR0f ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2124-ae03d011-4-cluster9 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.kULoPyPU7U ++ mktemp + local LAST_ERR=/tmp/tmp.5fOabMHghJ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2124-ae03d011-4-cluster9 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.kULoPyPU7U Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2124-ae03d011-4-cluster9" modified. 
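Nearly every command in this trace runs through the suite's kubectl wrapper: stdout and stderr are captured into mktemp files, the call is retried up to three times, and the captured output is replayed before the temp files are removed. The wrapper's source is not part of the log, so the following is only a minimal sketch reconstructed from the expanded trace (the kubectl_bin name, the LAST_OUT/LAST_ERR temp files and the 'seq 0 2' retry loop appear verbatim above; the redirections and back-off are assumptions):

# Sketch reconstructed from the xtrace output above; not the actual e2e-tests helper.
kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" 1>"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ "$exit_status" != 0 ]; then
            sleep 0    # the trace shows 'sleep 0' between failed attempts
        else
            break
        fi
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR"
    rm -f "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}

Because failures inside the wrapper are retried and the callers often discard the final status with ':', lines such as "error: resource(s) were provided, but no name was specified" during the chaos-mesh cleanup are expected noise: the delete commands receive an empty name list because nothing chaos-mesh related exists in this cluster.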
+ cat /tmp/tmp.5fOabMHghJ + rm /tmp/tmp.kULoPyPU7U /tmp/tmp.5fOabMHghJ + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.xrbhqQxWdy ++ mktemp + local LAST_ERR=/tmp/tmp.6kdVgaYdpL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xrbhqQxWdy customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.6kdVgaYdpL + rm /tmp/tmp.xrbhqQxWdy /tmp/tmp.6kdVgaYdpL + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.sbYzsfdWru ++ mktemp + local LAST_ERR=/tmp/tmp.vD3cU2dRlQ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.sbYzsfdWru clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.vD3cU2dRlQ + rm /tmp/tmp.sbYzsfdWru /tmp/tmp.vD3cU2dRlQ + return 0 + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2124-ae03d011^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/deploy/cw-operator.yaml + kubectl_bin apply -f - ++ mktemp + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + local LAST_OUT=/tmp/tmp.14RixbwkBR ++ mktemp + local LAST_ERR=/tmp/tmp.uf2hovZjj6 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.14RixbwkBR deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.uf2hovZjj6 + rm /tmp/tmp.14RixbwkBR /tmp/tmp.uf2hovZjj6 + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.hKRD2vN1kK ++ mktemp + local LAST_ERR=/tmp/tmp.D73VD4WQeT + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hKRD2vN1kK pod/percona-xtradb-cluster-operator-748bf66775-shz6k condition met + cat /tmp/tmp.D73VD4WQeT + rm /tmp/tmp.hKRD2vN1kK /tmp/tmp.D73VD4WQeT + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.4Lcc75qBjs +++ mktemp ++ local LAST_ERR=/tmp/tmp.hTWjwimSm6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4Lcc75qBjs ++ cat /tmp/tmp.hTWjwimSm6 ++ rm /tmp/tmp.4Lcc75qBjs /tmp/tmp.hTWjwimSm6 ++ return 0 + wait_pod percona-xtradb-cluster-operator-748bf66775-shz6k 480 pxc-operator + local pod=percona-xtradb-cluster-operator-748bf66775-shz6k + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-748bf66775-shz6k ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-748bf66775-shz6k condition met waiting for pod/percona-xtradb-cluster-operator-748bf66775-shz6k to become Ready.Ok + sleep 3 + create_namespace users-18989 + local namespace=users-18989 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get MutatingWebhookConfiguration + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ awk '{print $1}' ++ grep chaos-mesh.org + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + '[' -n '' 
']' + desc 'cleaned up old namespaces users-18989' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-18989 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-18989 + awk '{print$1}' ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.ExuJFmK1tE + local LAST_OUT=/tmp/tmp.OEgxwIEEC9 + egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' ++ mktemp + local LAST_ERR=/tmp/tmp.DuVLyrOIX7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-18989 ++ mktemp + local LAST_ERR=/tmp/tmp.qhYVnZaIRT + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-18989 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ExuJFmK1tE + cat /tmp/tmp.qhYVnZaIRT + rm /tmp/tmp.ExuJFmK1tE /tmp/tmp.qhYVnZaIRT + return 0 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-18989 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.OEgxwIEEC9 + cat /tmp/tmp.DuVLyrOIX7 Error from server (NotFound): namespaces "users-18989" not found + rm /tmp/tmp.OEgxwIEEC9 /tmp/tmp.DuVLyrOIX7 + return 1 + : + wait_for_delete namespace/users-18989 + local res=namespace/users-18989 + echo -n 'waiting for namespace/users-18989 to be deleted' waiting for namespace/users-18989 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "users-18989" not found + desc 'create namespace users-18989' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-18989 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-18989 ++ mktemp + local LAST_OUT=/tmp/tmp.Y1Bk7resEX ++ mktemp + local LAST_ERR=/tmp/tmp.CeSKoPxdDq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-18989 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Y1Bk7resEX namespace/users-18989 created + cat /tmp/tmp.CeSKoPxdDq + rm /tmp/tmp.Y1Bk7resEX /tmp/tmp.CeSKoPxdDq + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.boh2M8Pzly +++ mktemp ++ local LAST_ERR=/tmp/tmp.jZUuP7Oe6k ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.boh2M8Pzly ++ cat /tmp/tmp.jZUuP7Oe6k ++ rm /tmp/tmp.boh2M8Pzly /tmp/tmp.jZUuP7Oe6k ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2124-ae03d011-4-cluster9 --namespace=users-18989 ++ mktemp + local LAST_OUT=/tmp/tmp.LQibGcUY8A ++ mktemp + local LAST_ERR=/tmp/tmp.tfAWgu68ir + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2124-ae03d011-4-cluster9 --namespace=users-18989 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LQibGcUY8A Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2124-ae03d011-4-cluster9" modified. 
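Both create_namespace calls above (first for pxc-operator, then for users-18989) expand to the same sequence: tear down chaos-mesh leftovers, delete all old non-system namespaces, delete and recreate the target namespace, and point the current kube context at it. A condensed sketch of that flow, assembled from the trace (the egrep filter is copied verbatim; folding the interleaved deletes into sequential steps and the '|| true' error handling are simplifications):

# Condensed sketch of the create_namespace flow visible in the trace; not the actual helper.
create_namespace() {
    local namespace=$1
    destroy_chaos_mesh    # webhook/CRD/clusterrole cleanup shown at the top of the log
    # delete every old test namespace, keeping system and infra ones
    kubectl_bin get ns \
        | egrep -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' \
        | awk '{print $1}' \
        | xargs kubectl delete ns || true
    kubectl_bin delete namespace "$namespace" || true    # tolerates NotFound on a fresh cluster
    wait_for_delete "namespace/$namespace"
    kubectl_bin create namespace "$namespace"
    kubectl_bin config set-context "$(kubectl_bin config current-context)" --namespace="$namespace"
}

In this run the users-18989 delete fails with NotFound (the namespace did not exist yet), which is why the wrapper retries and the caller ignores the final 'return 1' with ':' before creating the namespace.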
+ cat /tmp/tmp.tfAWgu68ir + rm /tmp/tmp.LQibGcUY8A /tmp/tmp.tfAWgu68ir + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.bqo8FynzkA ++ mktemp + local LAST_ERR=/tmp/tmp.WcUdGTw5PC + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.bqo8FynzkA secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.WcUdGTw5PC + rm /tmp/tmp.bqo8FynzkA /tmp/tmp.WcUdGTw5PC + return 0 + desc 'create PXC cluster with 1-password secret' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster with 1-password secret ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/some-name.yml '' '' /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/secrets_one_pass.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/secrets_one_pass.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/secrets_one_pass.yml ++ mktemp + local LAST_OUT=/tmp/tmp.lURICagQCB ++ mktemp + local LAST_ERR=/tmp/tmp.JZOOpLbhpj + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/secrets_one_pass.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lURICagQCB secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.JZOOpLbhpj + rm /tmp/tmp.lURICagQCB /tmp/tmp.JZOOpLbhpj + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/client.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: 
perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-18989~ + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2124-ae03d011#' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.65MS85ImIo ++ mktemp + local LAST_ERR=/tmp/tmp.nZAyuR4PF1 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.65MS85ImIo deployment.apps/pxc-client created + cat /tmp/tmp.nZAyuR4PF1 + rm /tmp/tmp.65MS85ImIo /tmp/tmp.nZAyuR4PF1 + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ mktemp + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_OUT=/tmp/tmp.Mer8jfyrQk + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-18989~ ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + local LAST_ERR=/tmp/tmp.jznVDbHqmv + local exit_status=0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/conf/some-name.yml ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2124-ae03d011#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Mer8jfyrQk perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.jznVDbHqmv + rm /tmp/tmp.Mer8jfyrQk /tmp/tmp.jznVDbHqmv + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 
'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.Oc8S7IuciC ++++ mktemp +++ local LAST_ERR=/tmp/tmp.LyvA5kyY2O +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.Oc8S7IuciC +++ cat /tmp/tmp.LyvA5kyY2O +++ rm /tmp/tmp.Oc8S7IuciC /tmp/tmp.LyvA5kyY2O +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.MRU1P7FfJd ++++ mktemp +++ local LAST_ERR=/tmp/tmp.cIA3KTvY9U +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.MRU1P7FfJd +++ cat /tmp/tmp.cIA3KTvY9U +++ rm /tmp/tmp.MRU1P7FfJd /tmp/tmp.cIA3KTvY9U +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-18989 ++ mktemp + local LAST_OUT=/tmp/tmp.dTIcINMBj3 ++ mktemp + local LAST_ERR=/tmp/tmp.cVRT5X36y3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-18989 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-18989 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-18989 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.dTIcINMBj3 + cat /tmp/tmp.cVRT5X36y3 error: no matching resources found + rm /tmp/tmp.dTIcINMBj3 /tmp/tmp.cVRT5X36y3 + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + 
local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.OLKgq4JfjQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.hEZXup0vDI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OLKgq4JfjQ ++ cat /tmp/tmp.hEZXup0vDI ++ rm /tmp/tmp.OLKgq4JfjQ /tmp/tmp.hEZXup0vDI ++ return 0 + local 'root_pass=QPUS4aX$0?RxN?_azg30' + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QykRPt6ahh +++ mktemp ++ local LAST_ERR=/tmp/tmp.gYnPAxAHr8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QykRPt6ahh ++ cat /tmp/tmp.gYnPAxAHr8 Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.QykRPt6ahh /tmp/tmp.gYnPAxAHr8 ++ return 0 + [[ -n '' ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''QPUS4aX$0?RxN?_azg30'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -p'\''QPUS4aX$0?RxN?_azg30'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Iul59r3mpe +++ mktemp ++ local LAST_ERR=/tmp/tmp.lieRvOiJPQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Iul59r3mpe ++ cat 
/tmp/tmp.lieRvOiJPQ ++ rm /tmp/tmp.Iul59r3mpe /tmp/tmp.lieRvOiJPQ ++ return 0 + client_pod=pxc-client-857d976497-x6k8q + wait_pod pxc-client-857d976497-x6k8q + local pod=pxc-client-857d976497-x6k8q + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-x6k8q ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-x6k8q condition met waiting for pod/pxc-client-857d976497-x6k8q to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''QPUS4aX$0?RxN?_azg30'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -p'\''QPUS4aX$0?RxN?_azg30'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7Vdj2H6fJV +++ mktemp ++ local LAST_ERR=/tmp/tmp.JkAJfQKZXu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7Vdj2H6fJV ++ cat /tmp/tmp.JkAJfQKZXu ++ rm /tmp/tmp.7Vdj2H6fJV /tmp/tmp.JkAJfQKZXu ++ return 0 + client_pod=pxc-client-857d976497-x6k8q + wait_pod pxc-client-857d976497-x6k8q + local pod=pxc-client-857d976497-x6k8q + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-x6k8q ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-857d976497-x6k8q condition met waiting for pod/pxc-client-857d976497-x6k8q to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''QPUS4aX$0?RxN?_azg30'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''QPUS4aX$0?RxN?_azg30'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''QPUS4aX$0?RxN?_azg30'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''QPUS4aX$0?RxN?_azg30'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WvPBfYwQi1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.IENAOWqnzw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WvPBfYwQi1 ++ cat /tmp/tmp.IENAOWqnzw ++ rm /tmp/tmp.WvPBfYwQi1 /tmp/tmp.IENAOWqnzw ++ return 0 + client_pod=pxc-client-857d976497-x6k8q + wait_pod pxc-client-857d976497-x6k8q + local 
pod=pxc-client-857d976497-x6k8q + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-x6k8q ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-857d976497-x6k8q condition met waiting for pod/pxc-client-857d976497-x6k8q to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.UQXTNYaefz/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-1.sql /tmp/tmp.UQXTNYaefz/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''QPUS4aX$0?RxN?_azg30'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''QPUS4aX$0?RxN?_azg30'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''QPUS4aX$0?RxN?_azg30'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''QPUS4aX$0?RxN?_azg30'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JGN2WeBQ2b +++ mktemp ++ local LAST_ERR=/tmp/tmp.FhSeRcdebe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JGN2WeBQ2b ++ cat /tmp/tmp.FhSeRcdebe ++ rm /tmp/tmp.JGN2WeBQ2b /tmp/tmp.FhSeRcdebe ++ return 0 + client_pod=pxc-client-857d976497-x6k8q + wait_pod pxc-client-857d976497-x6k8q + local pod=pxc-client-857d976497-x6k8q + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-x6k8q ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-857d976497-x6k8q condition met waiting for pod/pxc-client-857d976497-x6k8q to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.UQXTNYaefz/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-1.sql /tmp/tmp.UQXTNYaefz/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''QPUS4aX$0?RxN?_azg30'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''QPUS4aX$0?RxN?_azg30'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-1-57.sql ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''QPUS4aX$0?RxN?_azg30'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''QPUS4aX$0?RxN?_azg30'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WDhYrpU0qy +++ mktemp ++ local LAST_ERR=/tmp/tmp.CX6wS4FRZH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WDhYrpU0qy ++ cat /tmp/tmp.CX6wS4FRZH ++ rm /tmp/tmp.WDhYrpU0qy /tmp/tmp.CX6wS4FRZH ++ return 0 + client_pod=pxc-client-857d976497-x6k8q + wait_pod pxc-client-857d976497-x6k8q + local pod=pxc-client-857d976497-x6k8q + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-x6k8q ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-x6k8q condition met waiting for pod/pxc-client-857d976497-x6k8q to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.UQXTNYaefz/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-1.sql /tmp/tmp.UQXTNYaefz/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YNefuJkEaC +++ mktemp ++ local LAST_ERR=/tmp/tmp.cRjyID7HhM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YNefuJkEaC ++ cat /tmp/tmp.cRjyID7HhM Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.YNefuJkEaC /tmp/tmp.cRjyID7HhM ++ return 0 + '[' '' ']' + desc 'test missing passwords were created and present in internal secrets' + set +o xtrace ----------------------------------------------------------------------------------- test missing passwords were created and present in internal secrets ----------------------------------------------------------------------------------- + empty_pwds=() + wrong_pwds=() + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking root' Checking root ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.gG3v3JOUR9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.rlR92VJQzR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gG3v3JOUR9 ++ cat /tmp/tmp.rlR92VJQzR ++ rm /tmp/tmp.gG3v3JOUR9 /tmp/tmp.rlR92VJQzR ++ return 0 + secret_pass='QPUS4aX$0?RxN?_azg30' ++ getSecretData internal-some-name root ++ local secretName=internal-some-name ++ local dataKey=root ++ kubectl_bin get secrets/internal-some-name '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.nFURSU9zAV +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZSRyyXUU2X ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nFURSU9zAV ++ cat /tmp/tmp.ZSRyyXUU2X ++ rm /tmp/tmp.nFURSU9zAV /tmp/tmp.ZSRyyXUU2X ++ return 0 + int_secret_pass='QPUS4aX$0?RxN?_azg30' + [[ -z QPUS4aX$0?RxN?_azg30 ]] + [[ QPUS4aX$0?RxN?_azg30 != \Q\P\U\S\4\a\X\$\0\?\R\x\N\?\_\a\z\g\3\0 ]] + [[ root != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ root ]] + [[ '' =~ root ]] + echo 'Running compare for root' Running compare for root + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''QPUS4aX$0?RxN?_azg30'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''QPUS4aX$0?RxN?_azg30'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW 
TABLES;' '-h some-name-proxysql -uroot -p'\''QPUS4aX$0?RxN?_azg30'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''QPUS4aX$0?RxN?_azg30'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jNRxZozLJW +++ mktemp ++ local LAST_ERR=/tmp/tmp.b8Gjt1L38h ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jNRxZozLJW ++ cat /tmp/tmp.b8Gjt1L38h ++ rm /tmp/tmp.jNRxZozLJW /tmp/tmp.b8Gjt1L38h ++ return 0 + client_pod=pxc-client-857d976497-x6k8q + wait_pod pxc-client-857d976497-x6k8q + local pod=pxc-client-857d976497-x6k8q + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-x6k8q ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-x6k8q condition met waiting for pod/pxc-client-857d976497-x6k8q to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.UQXTNYaefz/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql /tmp/tmp.UQXTNYaefz/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking xtrabackup' Checking xtrabackup ++ getSecretData my-cluster-secrets xtrabackup ++ local secretName=my-cluster-secrets ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.jfWxtf1j3f +++ mktemp ++ local LAST_ERR=/tmp/tmp.9UriTdVgJI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jfWxtf1j3f ++ cat /tmp/tmp.9UriTdVgJI ++ rm /tmp/tmp.jfWxtf1j3f /tmp/tmp.9UriTdVgJI ++ return 0 + secret_pass=')q(pNPRy!' ++ getSecretData internal-some-name xtrabackup ++ local secretName=internal-some-name ++ local dataKey=xtrabackup ++ kubectl_bin get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.BLcDRKYGZF +++ mktemp ++ local LAST_ERR=/tmp/tmp.l7Ca1CLtGC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.xtrabackup}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BLcDRKYGZF ++ cat /tmp/tmp.l7Ca1CLtGC ++ rm /tmp/tmp.BLcDRKYGZF /tmp/tmp.l7Ca1CLtGC ++ return 0 + int_secret_pass=')q(pNPRy!' + [[ -z )q(pNPRy! ]] + [[ )q(pNPRy! != \)\q\<\n\k\*\[\Y\]\0\W\M\>\(\p\N\P\R\y\! 
]] + [[ xtrabackup != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ xtrabackup ]] + [[ '' =~ xtrabackup ]] + echo 'Running compare for xtrabackup' Running compare for xtrabackup + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\'')q(pNPRy!'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\'')q(pNPRy!'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uxtrabackup -p'\'')q(pNPRy!'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uxtrabackup -p'\'')q(pNPRy!'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3Lb1lxfZwh +++ mktemp ++ local LAST_ERR=/tmp/tmp.3WSzQnbgjD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3Lb1lxfZwh ++ cat /tmp/tmp.3WSzQnbgjD ++ rm /tmp/tmp.3Lb1lxfZwh /tmp/tmp.3WSzQnbgjD ++ return 0 + client_pod=pxc-client-857d976497-x6k8q + wait_pod pxc-client-857d976497-x6k8q + local pod=pxc-client-857d976497-x6k8q + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-x6k8q ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-x6k8q condition met waiting for pod/pxc-client-857d976497-x6k8q to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.UQXTNYaefz/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql /tmp/tmp.UQXTNYaefz/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking monitor' Checking monitor ++ getSecretData my-cluster-secrets monitor ++ local secretName=my-cluster-secrets ++ local dataKey=monitor ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.1a2SxyMGkf +++ mktemp ++ local LAST_ERR=/tmp/tmp.19eIwsbWvi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1a2SxyMGkf ++ cat /tmp/tmp.19eIwsbWvi ++ rm /tmp/tmp.1a2SxyMGkf /tmp/tmp.19eIwsbWvi ++ return 0 + secret_pass=monitor_password ++ getSecretData internal-some-name monitor ++ local secretName=internal-some-name ++ local dataKey=monitor ++ kubectl_bin get secrets/internal-some-name '--template={{.data.monitor}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.1M2gZy3bwK +++ mktemp ++ local LAST_ERR=/tmp/tmp.wYQDXZuMvb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.monitor}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1M2gZy3bwK ++ cat /tmp/tmp.wYQDXZuMvb ++ rm /tmp/tmp.1M2gZy3bwK /tmp/tmp.wYQDXZuMvb ++ return 0 + int_secret_pass=monitor_password + [[ -z monitor_password ]] + [[ monitor_password != \m\o\n\i\t\o\r\_\p\a\s\s\w\o\r\d ]] + [[ monitor != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ monitor ]] + [[ '' =~ monitor ]] + echo 'Running compare for monitor' Running compare for monitor + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''monitor_password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''monitor_password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bMU7nG9Rzw +++ mktemp ++ local LAST_ERR=/tmp/tmp.5M8kFlVUqa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bMU7nG9Rzw ++ cat /tmp/tmp.5M8kFlVUqa ++ rm /tmp/tmp.bMU7nG9Rzw /tmp/tmp.5M8kFlVUqa ++ return 0 + client_pod=pxc-client-857d976497-x6k8q + wait_pod pxc-client-857d976497-x6k8q + local pod=pxc-client-857d976497-x6k8q + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-x6k8q ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-x6k8q 
condition met waiting for pod/pxc-client-857d976497-x6k8q to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.UQXTNYaefz/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql /tmp/tmp.UQXTNYaefz/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking proxyadmin' Checking proxyadmin ++ getSecretData my-cluster-secrets proxyadmin ++ local secretName=my-cluster-secrets ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.UTEMLPTy3z +++ mktemp ++ local LAST_ERR=/tmp/tmp.HdEjL4O02q ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UTEMLPTy3z ++ cat /tmp/tmp.HdEjL4O02q ++ rm /tmp/tmp.UTEMLPTy3z /tmp/tmp.HdEjL4O02q ++ return 0 + secret_pass=f.-0mAN374eoztbikoW7 ++ getSecretData internal-some-name proxyadmin ++ local secretName=internal-some-name ++ local dataKey=proxyadmin ++ kubectl_bin get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.sw8CvR1ERO +++ mktemp ++ local LAST_ERR=/tmp/tmp.awbVwDSFpP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.proxyadmin}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sw8CvR1ERO ++ cat /tmp/tmp.awbVwDSFpP ++ rm /tmp/tmp.sw8CvR1ERO /tmp/tmp.awbVwDSFpP ++ return 0 + int_secret_pass=f.-0mAN374eoztbikoW7 + [[ -z f.-0mAN374eoztbikoW7 ]] + [[ f.-0mAN374eoztbikoW7 != \f\.\-\0\m\A\N\3\7\4\e\o\z\t\b\i\k\o\W\7 ]] + [[ proxyadmin != \p\r\o\x\y\a\d\m\i\n ]] + [[ proxyadmin == \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ proxyadmin ]] + [[ '' =~ proxyadmin ]] + echo 'Running compare for proxyadmin' Running compare for proxyadmin + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''f.-0mAN374eoztbikoW7'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''f.-0mAN374eoztbikoW7'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''f.-0mAN374eoztbikoW7'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''f.-0mAN374eoztbikoW7'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.UQXTNYaefz/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-2.sql /tmp/tmp.UQXTNYaefz/select-2.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking operator' Checking operator ++ getSecretData my-cluster-secrets operator ++ local secretName=my-cluster-secrets ++ local dataKey=operator ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.0O3W7xdSkW +++ mktemp ++ local LAST_ERR=/tmp/tmp.WLqGxJGvKo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0O3W7xdSkW ++ cat /tmp/tmp.WLqGxJGvKo ++ rm /tmp/tmp.0O3W7xdSkW /tmp/tmp.WLqGxJGvKo ++ return 0 + secret_pass=4svkvtT5JnT0WdxQyvKV ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.eU5eYQqTVh +++ mktemp ++ local LAST_ERR=/tmp/tmp.zuTnDTTfLO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eU5eYQqTVh ++ cat /tmp/tmp.zuTnDTTfLO ++ rm /tmp/tmp.eU5eYQqTVh /tmp/tmp.zuTnDTTfLO ++ return 0 + int_secret_pass=4svkvtT5JnT0WdxQyvKV + [[ -z 4svkvtT5JnT0WdxQyvKV ]] + [[ 4svkvtT5JnT0WdxQyvKV != \4\s\v\k\v\t\T\5\J\n\T\0\W\d\x\Q\y\v\K\V ]] + [[ operator != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ operator ]] + [[ '' =~ operator ]] + echo 'Running compare for operator' Running compare for operator + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''4svkvtT5JnT0WdxQyvKV'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''4svkvtT5JnT0WdxQyvKV'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''4svkvtT5JnT0WdxQyvKV'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''4svkvtT5JnT0WdxQyvKV'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JAPdPaCWng +++ mktemp ++ local LAST_ERR=/tmp/tmp.j4kU5wa6WP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JAPdPaCWng ++ cat /tmp/tmp.j4kU5wa6WP ++ rm /tmp/tmp.JAPdPaCWng /tmp/tmp.j4kU5wa6WP ++ return 0 + client_pod=pxc-client-857d976497-x6k8q + wait_pod pxc-client-857d976497-x6k8q + local pod=pxc-client-857d976497-x6k8q + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-x6k8q ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local 
container= + set +o xtrace pod/pxc-client-857d976497-x6k8q condition met waiting for pod/pxc-client-857d976497-x6k8q to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.UQXTNYaefz/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql /tmp/tmp.UQXTNYaefz/select-4.sql + for user in root xtrabackup monitor proxyadmin operator replication + echo 'Checking replication' Checking replication ++ getSecretData my-cluster-secrets replication ++ local secretName=my-cluster-secrets ++ local dataKey=replication ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.IjtqmAfkZm +++ mktemp ++ local LAST_ERR=/tmp/tmp.CAnA4W0tyC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IjtqmAfkZm ++ cat /tmp/tmp.CAnA4W0tyC ++ rm /tmp/tmp.IjtqmAfkZm /tmp/tmp.CAnA4W0tyC ++ return 0 + secret_pass='P*{ER6h^%~RbWF_6p^3I' ++ getSecretData internal-some-name replication ++ local secretName=internal-some-name ++ local dataKey=replication ++ kubectl_bin get secrets/internal-some-name '--template={{.data.replication}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.mH4kyEL9sI +++ mktemp ++ local LAST_ERR=/tmp/tmp.wqMgJw96Kz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.replication}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mH4kyEL9sI ++ cat /tmp/tmp.wqMgJw96Kz ++ rm /tmp/tmp.mH4kyEL9sI /tmp/tmp.wqMgJw96Kz ++ return 0 + int_secret_pass='P*{ER6h^%~RbWF_6p^3I' + [[ -z P*{ER6h^%~RbWF_6p^3I ]] + [[ P*{ER6h^%~RbWF_6p^3I != \P\*\{\E\R\6\h\^\%\~\R\b\W\F\_\6\p\^\3\I ]] + [[ replication != \p\r\o\x\y\a\d\m\i\n ]] + [[ '' =~ replication ]] + [[ '' =~ replication ]] + echo 'Running compare for replication' Running compare for replication + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''P*{ER6h^%~RbWF_6p^3I'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''P*{ER6h^%~RbWF_6p^3I'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -ureplication -p'\''P*{ER6h^%~RbWF_6p^3I'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -ureplication -p'\''P*{ER6h^%~RbWF_6p^3I'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RzsfL2Md59 +++ mktemp ++ local LAST_ERR=/tmp/tmp.aEMoYJaKHE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RzsfL2Md59 ++ cat /tmp/tmp.aEMoYJaKHE ++ rm /tmp/tmp.RzsfL2Md59 /tmp/tmp.aEMoYJaKHE ++ return 
0 + client_pod=pxc-client-857d976497-x6k8q + wait_pod pxc-client-857d976497-x6k8q + local pod=pxc-client-857d976497-x6k8q + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-x6k8q ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-x6k8q condition met waiting for pod/pxc-client-857d976497-x6k8q to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.UQXTNYaefz/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql /tmp/tmp.UQXTNYaefz/select-4.sql + [[ -n '' ]] + [[ -n '' ]] + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Oe9nfAbev0 ++ mktemp + local LAST_ERR=/tmp/tmp.PbKa0PMi8B + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Oe9nfAbev0 secret/my-cluster-secrets patched + cat /tmp/tmp.PbKa0PMi8B + rm /tmp/tmp.Oe9nfAbev0 /tmp/tmp.PbKa0PMi8B + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J5KmFgXCn0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Bg4wa8EKJd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.J5KmFgXCn0 ++ cat /tmp/tmp.Bg4wa8EKJd ++ rm /tmp/tmp.J5KmFgXCn0 /tmp/tmp.Bg4wa8EKJd ++ return 0 + client_pod=pxc-client-857d976497-x6k8q + wait_pod pxc-client-857d976497-x6k8q + local pod=pxc-client-857d976497-x6k8q + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-x6k8q ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-857d976497-x6k8q condition met waiting for pod/pxc-client-857d976497-x6k8q to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.UQXTNYaefz/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql /tmp/tmp.UQXTNYaefz/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.qUnWlLWVvl ++ mktemp + local LAST_ERR=/tmp/tmp.sKRDIWu79z + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qUnWlLWVvl perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.sKRDIWu79z + rm /tmp/tmp.qUnWlLWVvl /tmp/tmp.sKRDIWu79z + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oIbDpvkVzi +++ mktemp ++ local LAST_ERR=/tmp/tmp.GDmZZSFbQH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oIbDpvkVzi ++ cat /tmp/tmp.GDmZZSFbQH ++ rm /tmp/tmp.oIbDpvkVzi /tmp/tmp.GDmZZSFbQH ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NnZ4PthRIH +++ mktemp ++ local LAST_ERR=/tmp/tmp.QfijdCb1Kh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NnZ4PthRIH ++ cat /tmp/tmp.QfijdCb1Kh ++ rm /tmp/tmp.NnZ4PthRIH /tmp/tmp.QfijdCb1Kh ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.aaNq3SEhGX ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.6CEcz017iS +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.aaNq3SEhGX +++++ cat /tmp/tmp.6CEcz017iS +++++ rm /tmp/tmp.aaNq3SEhGX /tmp/tmp.6CEcz017iS +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.13n8F4TUPp ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.17B8yQqNHN +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.13n8F4TUPp +++++ cat 
/tmp/tmp.17B8yQqNHN +++++ rm /tmp/tmp.13n8F4TUPp /tmp/tmp.17B8yQqNHN +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0AK3BJLOHY +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZYQfZPDAkQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0AK3BJLOHY ++ cat /tmp/tmp.ZYQfZPDAkQ ++ rm /tmp/tmp.0AK3BJLOHY /tmp/tmp.ZYQfZPDAkQ ++ return 0 + [[ 3 == \3 ]] + echo + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Ff5fE2RMYO ++ mktemp + local LAST_ERR=/tmp/tmp.o906w6J93c + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Ff5fE2RMYO secret/my-cluster-secrets patched + cat /tmp/tmp.o906w6J93c + rm /tmp/tmp.Ff5fE2RMYO /tmp/tmp.o906w6J93c + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ohe2Gr8qOM +++ mktemp ++ local LAST_ERR=/tmp/tmp.k21odCNpNG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ohe2Gr8qOM ++ cat /tmp/tmp.k21odCNpNG ++ rm /tmp/tmp.Ohe2Gr8qOM /tmp/tmp.k21odCNpNG ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sB7gyfNFfM +++ mktemp ++ local LAST_ERR=/tmp/tmp.GaAL1USZZA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sB7gyfNFfM ++ cat /tmp/tmp.GaAL1USZZA ++ rm /tmp/tmp.sB7gyfNFfM /tmp/tmp.GaAL1USZZA ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fdVenDpPxI +++ mktemp ++ local LAST_ERR=/tmp/tmp.mMp0HlIgGa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fdVenDpPxI ++ cat /tmp/tmp.mMp0HlIgGa ++ rm /tmp/tmp.fdVenDpPxI /tmp/tmp.mMp0HlIgGa ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
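# Editor's note: the steps above rotate a system user's password by patching the user-facing
# Secret with a base64-encoded value; the operator is then expected to sync it into the
# internal Secret on the next reconcile. A minimal sketch of the same flow, assuming the
# object names used in this run (some-name, my-cluster-secrets, internal-some-name):
NEW_PASS_B64=$(echo -n 'test-password' | base64)            # yields dGVzdC1wYXNzd29yZA==
kubectl patch secret my-cluster-secrets \
    -p "{\"data\":{\"proxyadmin\": \"${NEW_PASS_B64}\"}}"
# verify that the user-facing and operator-managed Secrets converge on the same value:
kubectl get secret my-cluster-secrets  -o jsonpath='{.data.proxyadmin}' | base64 --decode; echo
kubectl get secret internal-some-name -o jsonpath='{.data.proxyadmin}' | base64 --decode; echo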
.+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Hsbp6S44ky +++ mktemp ++ local LAST_ERR=/tmp/tmp.9rBIgCND2u ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Hsbp6S44ky ++ cat /tmp/tmp.9rBIgCND2u ++ rm /tmp/tmp.Hsbp6S44ky /tmp/tmp.9rBIgCND2u ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UsdrxHNK6y +++ mktemp ++ local LAST_ERR=/tmp/tmp.3rJUoyNq5A ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UsdrxHNK6y ++ cat /tmp/tmp.3rJUoyNq5A ++ rm /tmp/tmp.UsdrxHNK6y /tmp/tmp.3rJUoyNq5A ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.18K7Xpxf4q +++ mktemp ++ local LAST_ERR=/tmp/tmp.WrfPvKlZRI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.18K7Xpxf4q ++ cat /tmp/tmp.WrfPvKlZRI ++ rm /tmp/tmp.18K7Xpxf4q /tmp/tmp.WrfPvKlZRI ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.2ff4UHSkFm ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.PDwmVkEwns +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.2ff4UHSkFm +++++ cat /tmp/tmp.PDwmVkEwns +++++ rm /tmp/tmp.2ff4UHSkFm /tmp/tmp.PDwmVkEwns +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.3D8CwcKcW7 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.k6N5wd1sG7 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.3D8CwcKcW7 +++++ cat /tmp/tmp.k6N5wd1sG7 +++++ rm /tmp/tmp.3D8CwcKcW7 /tmp/tmp.k6N5wd1sG7 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Zm3e6WsA8v +++ mktemp ++ local LAST_ERR=/tmp/tmp.mHP3ASkMo8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Zm3e6WsA8v ++ cat /tmp/tmp.mHP3ASkMo8 ++ rm /tmp/tmp.Zm3e6WsA8v /tmp/tmp.mHP3ASkMo8 ++ return 0 + [[ 3 == \3 ]] + echo + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local 
command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.UQXTNYaefz/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-2.sql /tmp/tmp.UQXTNYaefz/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.UQXTNYaefz/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-2.sql /tmp/tmp.UQXTNYaefz/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.UQXTNYaefz/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-2.sql /tmp/tmp.UQXTNYaefz/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.W9gzGGJ1d7 ++ mktemp + local LAST_ERR=/tmp/tmp.pNQONBYaCb + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.W9gzGGJ1d7 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.pNQONBYaCb + rm /tmp/tmp.W9gzGGJ1d7 /tmp/tmp.pNQONBYaCb + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.3axUUftYO9 ++ mktemp + local LAST_ERR=/tmp/tmp.z3dLWxfq5f + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3axUUftYO9 secret/my-cluster-secrets patched + cat /tmp/tmp.z3dLWxfq5f + rm /tmp/tmp.3axUUftYO9 /tmp/tmp.z3dLWxfq5f + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SBoewoCTuo +++ mktemp ++ local LAST_ERR=/tmp/tmp.iWz9PRGaoe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SBoewoCTuo ++ cat /tmp/tmp.iWz9PRGaoe ++ rm /tmp/tmp.SBoewoCTuo /tmp/tmp.iWz9PRGaoe ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LN1Ou9bFyt +++ mktemp ++ local LAST_ERR=/tmp/tmp.YhDFcQHl1c ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LN1Ou9bFyt ++ cat /tmp/tmp.YhDFcQHl1c ++ rm /tmp/tmp.LN1Ou9bFyt /tmp/tmp.YhDFcQHl1c ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
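# Editor's note: resizing ProxySQL is done with a merge patch on the PerconaXtraDBCluster
# resource, after which .status.state drops back to "initializing" until the operator
# reconciles. A minimal sketch of the patch and the status fields that
# wait_cluster_consistency polls, assuming the names from this run:
kubectl patch pxc some-name --type=merge -p '{"spec":{"proxysql":{"size":2}}}'
kubectl get pxc some-name -o jsonpath='{.status.state}'          # expect "ready"
kubectl get pxc some-name -o jsonpath='{.status.pxc.ready}'      # expect 3
kubectl get pxc some-name -o jsonpath='{.status.proxysql.ready}' # expect 2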
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hU4TBtIRrE +++ mktemp ++ local LAST_ERR=/tmp/tmp.77zr8Dqqbl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hU4TBtIRrE ++ cat /tmp/tmp.77zr8Dqqbl ++ rm /tmp/tmp.hU4TBtIRrE /tmp/tmp.77zr8Dqqbl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dTIZXn3blo +++ mktemp ++ local LAST_ERR=/tmp/tmp.ie6Zsl7fls ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dTIZXn3blo ++ cat /tmp/tmp.ie6Zsl7fls ++ rm /tmp/tmp.dTIZXn3blo /tmp/tmp.ie6Zsl7fls ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vwxLvddyWm +++ mktemp ++ local LAST_ERR=/tmp/tmp.NkNgHO5oSq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vwxLvddyWm ++ cat /tmp/tmp.NkNgHO5oSq ++ rm /tmp/tmp.vwxLvddyWm /tmp/tmp.NkNgHO5oSq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eFubO0iSCS +++ mktemp ++ local LAST_ERR=/tmp/tmp.hSDzoHrv17 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eFubO0iSCS ++ cat /tmp/tmp.hSDzoHrv17 ++ rm /tmp/tmp.eFubO0iSCS /tmp/tmp.hSDzoHrv17 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.n2z7x41UFe +++ mktemp ++ local LAST_ERR=/tmp/tmp.5LftVXGCHv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.n2z7x41UFe ++ cat /tmp/tmp.5LftVXGCHv ++ rm /tmp/tmp.n2z7x41UFe /tmp/tmp.5LftVXGCHv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EZOQijU0K8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.MOzweq59YX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EZOQijU0K8 ++ cat /tmp/tmp.MOzweq59YX ++ rm /tmp/tmp.EZOQijU0K8 /tmp/tmp.MOzweq59YX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tvMvkrTKUF +++ mktemp ++ local LAST_ERR=/tmp/tmp.TCUAcn3Gv4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tvMvkrTKUF ++ cat /tmp/tmp.TCUAcn3Gv4 ++ rm /tmp/tmp.tvMvkrTKUF /tmp/tmp.TCUAcn3Gv4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pXaohJhmpT +++ mktemp ++ local LAST_ERR=/tmp/tmp.qvkBYncGIc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pXaohJhmpT ++ cat /tmp/tmp.qvkBYncGIc ++ rm /tmp/tmp.pXaohJhmpT /tmp/tmp.qvkBYncGIc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AZwsLHi1Op +++ mktemp ++ local LAST_ERR=/tmp/tmp.nmNLRKLTef ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AZwsLHi1Op ++ cat /tmp/tmp.nmNLRKLTef ++ rm /tmp/tmp.AZwsLHi1Op /tmp/tmp.nmNLRKLTef ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uBLEXhDOhu +++ mktemp ++ local LAST_ERR=/tmp/tmp.ukP62PgCpk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uBLEXhDOhu ++ cat /tmp/tmp.ukP62PgCpk ++ rm /tmp/tmp.uBLEXhDOhu /tmp/tmp.ukP62PgCpk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GuVnvRFM74 +++ mktemp ++ local LAST_ERR=/tmp/tmp.7u6VXNOYaX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GuVnvRFM74 ++ cat /tmp/tmp.7u6VXNOYaX ++ rm /tmp/tmp.GuVnvRFM74 /tmp/tmp.7u6VXNOYaX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rZdaFtJv1h +++ mktemp ++ local LAST_ERR=/tmp/tmp.At68Rr4Eko ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rZdaFtJv1h ++ cat /tmp/tmp.At68Rr4Eko ++ rm /tmp/tmp.rZdaFtJv1h /tmp/tmp.At68Rr4Eko ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NlgUExmKef +++ mktemp ++ local LAST_ERR=/tmp/tmp.IJo4QqG9ts ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NlgUExmKef ++ cat /tmp/tmp.IJo4QqG9ts ++ rm /tmp/tmp.NlgUExmKef /tmp/tmp.IJo4QqG9ts ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ICJnU6muTT +++ mktemp ++ local LAST_ERR=/tmp/tmp.vvfKaY0xR6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ICJnU6muTT ++ cat /tmp/tmp.vvfKaY0xR6 ++ rm /tmp/tmp.ICJnU6muTT /tmp/tmp.vvfKaY0xR6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.f4Z7qXx56i +++ mktemp ++ local LAST_ERR=/tmp/tmp.WRvKutNFQa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.f4Z7qXx56i ++ cat /tmp/tmp.WRvKutNFQa ++ rm /tmp/tmp.f4Z7qXx56i /tmp/tmp.WRvKutNFQa ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.u78r2O1PWX +++ mktemp ++ local LAST_ERR=/tmp/tmp.CvN9jMRAqI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.u78r2O1PWX ++ cat /tmp/tmp.CvN9jMRAqI ++ rm /tmp/tmp.u78r2O1PWX /tmp/tmp.CvN9jMRAqI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.k7oEDv3Bn2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.4TNjvolhUr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.k7oEDv3Bn2 ++ cat /tmp/tmp.4TNjvolhUr ++ rm /tmp/tmp.k7oEDv3Bn2 /tmp/tmp.4TNjvolhUr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 18 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9HjmUKCDUl +++ mktemp ++ local LAST_ERR=/tmp/tmp.Q2NXGz0v5L ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9HjmUKCDUl ++ cat /tmp/tmp.Q2NXGz0v5L ++ rm /tmp/tmp.9HjmUKCDUl /tmp/tmp.Q2NXGz0v5L ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 19 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VkAqf9dss8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.LVF3eE2rqc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VkAqf9dss8 ++ cat /tmp/tmp.LVF3eE2rqc ++ rm /tmp/tmp.VkAqf9dss8 /tmp/tmp.LVF3eE2rqc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 20 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Lxq2BXpWks +++ mktemp ++ local LAST_ERR=/tmp/tmp.efH7W4xf9X ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Lxq2BXpWks ++ cat /tmp/tmp.efH7W4xf9X ++ rm /tmp/tmp.Lxq2BXpWks /tmp/tmp.efH7W4xf9X ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 21 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Eb1p1t8DdY +++ mktemp ++ local LAST_ERR=/tmp/tmp.oDvxHqOjK4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Eb1p1t8DdY ++ cat /tmp/tmp.oDvxHqOjK4 ++ rm /tmp/tmp.Eb1p1t8DdY /tmp/tmp.oDvxHqOjK4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 22 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nfDkesB1Ec +++ mktemp ++ local LAST_ERR=/tmp/tmp.eeJibrCyip ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nfDkesB1Ec ++ cat /tmp/tmp.eeJibrCyip ++ rm /tmp/tmp.nfDkesB1Ec /tmp/tmp.eeJibrCyip ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 23 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lPTenjaLZB +++ mktemp ++ local LAST_ERR=/tmp/tmp.YjztzAxlgy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lPTenjaLZB ++ cat /tmp/tmp.YjztzAxlgy ++ rm /tmp/tmp.lPTenjaLZB /tmp/tmp.YjztzAxlgy ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XXXJAYxEmK +++ mktemp ++ local LAST_ERR=/tmp/tmp.tEsd6dMjAd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XXXJAYxEmK ++ cat /tmp/tmp.tEsd6dMjAd ++ rm /tmp/tmp.XXXJAYxEmK /tmp/tmp.tEsd6dMjAd ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.FsJdDEzrUk ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.mRudi5DCRl +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.FsJdDEzrUk +++++ cat /tmp/tmp.mRudi5DCRl +++++ rm /tmp/tmp.FsJdDEzrUk /tmp/tmp.mRudi5DCRl +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.pe6lY6F3HN ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.dncVhDL36m +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.pe6lY6F3HN +++++ cat /tmp/tmp.dncVhDL36m +++++ rm /tmp/tmp.pe6lY6F3HN /tmp/tmp.dncVhDL36m +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WubriZW5Mr +++ mktemp ++ local LAST_ERR=/tmp/tmp.rJHVw5E0RH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WubriZW5Mr ++ cat /tmp/tmp.rJHVw5E0RH ++ rm /tmp/tmp.WubriZW5Mr /tmp/tmp.rJHVw5E0RH ++ return 0 + [[ 2 == \2 ]] + echo + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 
'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' -s /tmp/tmp.UQXTNYaefz/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-3.sql /tmp/tmp.UQXTNYaefz/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.mBukmMvR37 ++ mktemp + local LAST_ERR=/tmp/tmp.2EmsQhj3mR + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.mBukmMvR37 secret/my-cluster-secrets patched + cat /tmp/tmp.2EmsQhj3mR + rm /tmp/tmp.mBukmMvR37 /tmp/tmp.2EmsQhj3mR + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.JSJy8Lpw7z +++ mktemp ++ local LAST_ERR=/tmp/tmp.KV7m78Nanj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JSJy8Lpw7z ++ cat /tmp/tmp.KV7m78Nanj ++ rm /tmp/tmp.JSJy8Lpw7z /tmp/tmp.KV7m78Nanj ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + echo 'Skipping dual password feature doesn'\''t work for 5.7. PXC 5.7 doesn'\''t support it!' Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it! + return + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cpQJlDgVER +++ mktemp ++ local LAST_ERR=/tmp/tmp.8yDChJ3Yif ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cpQJlDgVER ++ cat /tmp/tmp.8yDChJ3Yif ++ rm /tmp/tmp.cpQJlDgVER /tmp/tmp.8yDChJ3Yif ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
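# Editor's note: wait_for_password_propagation is skipped here because dual passwords
# (ALTER USER ... RETAIN CURRENT PASSWORD) are a MySQL 8.0 feature and this run uses
# PXC 5.7, so the test just waits for the cluster to report ready again. A minimal
# sketch of that polling loop, with the names used in this log:
until [[ "$(kubectl get pxc some-name -o jsonpath='{.status.state}')" == "ready" ]]; do
    echo -n .
    sleep 5
done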
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OBsSAQvGXM +++ mktemp ++ local LAST_ERR=/tmp/tmp.OFbzWgPHtM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OBsSAQvGXM ++ cat /tmp/tmp.OFbzWgPHtM ++ rm /tmp/tmp.OBsSAQvGXM /tmp/tmp.OFbzWgPHtM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MoSdEVsQ20 +++ mktemp ++ local LAST_ERR=/tmp/tmp.c0aBaUVZVD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MoSdEVsQ20 ++ cat /tmp/tmp.c0aBaUVZVD ++ rm /tmp/tmp.MoSdEVsQ20 /tmp/tmp.c0aBaUVZVD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PCgXre1Xs7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.LVZzN3Hyli ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PCgXre1Xs7 ++ cat /tmp/tmp.LVZzN3Hyli ++ rm /tmp/tmp.PCgXre1Xs7 /tmp/tmp.LVZzN3Hyli ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.E432Uvm0ys +++ mktemp ++ local LAST_ERR=/tmp/tmp.DvPHdN3Lme ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.E432Uvm0ys ++ cat /tmp/tmp.DvPHdN3Lme ++ rm /tmp/tmp.E432Uvm0ys /tmp/tmp.DvPHdN3Lme ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WvFik4Ufzi +++ mktemp ++ local LAST_ERR=/tmp/tmp.nY1lsYZJ6H ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WvFik4Ufzi ++ cat /tmp/tmp.nY1lsYZJ6H ++ rm /tmp/tmp.WvFik4Ufzi /tmp/tmp.nY1lsYZJ6H ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.P6IQQdPwyo +++ mktemp ++ local LAST_ERR=/tmp/tmp.wjfLoIVHc3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.P6IQQdPwyo ++ cat /tmp/tmp.wjfLoIVHc3 ++ rm /tmp/tmp.P6IQQdPwyo /tmp/tmp.wjfLoIVHc3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.y938LtD0FQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.NBo51dGl76 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.y938LtD0FQ ++ cat /tmp/tmp.NBo51dGl76 ++ rm /tmp/tmp.y938LtD0FQ /tmp/tmp.NBo51dGl76 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Fw4COU1ONs +++ mktemp ++ local LAST_ERR=/tmp/tmp.jZ4wQ8ZtEh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Fw4COU1ONs ++ cat /tmp/tmp.jZ4wQ8ZtEh ++ rm /tmp/tmp.Fw4COU1ONs /tmp/tmp.jZ4wQ8ZtEh ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.r0jZjSGVVd ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.sFXiMDH81r +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.r0jZjSGVVd +++++ cat /tmp/tmp.sFXiMDH81r +++++ rm /tmp/tmp.r0jZjSGVVd /tmp/tmp.sFXiMDH81r +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.92CSCYlta2 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.xya0gi9Axy +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.92CSCYlta2 +++++ cat /tmp/tmp.xya0gi9Axy +++++ rm /tmp/tmp.92CSCYlta2 /tmp/tmp.xya0gi9Axy +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Iybi2LJCTH +++ mktemp ++ local LAST_ERR=/tmp/tmp.qmaoh2CYe7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Iybi2LJCTH ++ cat /tmp/tmp.qmaoh2CYe7 ++ rm /tmp/tmp.Iybi2LJCTH /tmp/tmp.qmaoh2CYe7 ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW 
TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X2wrE89HWy +++ mktemp ++ local LAST_ERR=/tmp/tmp.pgwCLubl4W ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.X2wrE89HWy ++ cat /tmp/tmp.pgwCLubl4W ++ rm /tmp/tmp.X2wrE89HWy /tmp/tmp.pgwCLubl4W ++ return 0 + client_pod=pxc-client-857d976497-x6k8q + wait_pod pxc-client-857d976497-x6k8q + local pod=pxc-client-857d976497-x6k8q + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-x6k8q ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-x6k8q condition met waiting for pod/pxc-client-857d976497-x6k8q to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.UQXTNYaefz/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql /tmp/tmp.UQXTNYaefz/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.wADeXyVGRZ ++ mktemp + local LAST_ERR=/tmp/tmp.sq4bB2jfak + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.wADeXyVGRZ secret/my-cluster-secrets patched + cat /tmp/tmp.sq4bB2jfak + rm /tmp/tmp.wADeXyVGRZ /tmp/tmp.sq4bB2jfak + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EqqGmX3WTG +++ mktemp ++ local LAST_ERR=/tmp/tmp.2MzLWfrSyL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EqqGmX3WTG ++ cat /tmp/tmp.2MzLWfrSyL ++ rm /tmp/tmp.EqqGmX3WTG /tmp/tmp.2MzLWfrSyL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
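# Editor's note: each compare_mysql_cmd call above amounts to running the query from the
# pxc-client pod against the proxy service and diffing the output with a stored expectation.
# A minimal sketch under the assumption that the client Deployment is named pxc-client and
# its image ships the mysql CLI; the exact flags used by run_mysql are not shown in this log:
kubectl exec deploy/pxc-client -c pxc-client -- \
    mysql -sN -h some-name-proxysql -uoperator -p'test-password' -e 'SHOW TABLES;' \
    > /tmp/select-4.sql
diff -u e2e-tests/users/compare/select-4.sql /tmp/select-4.sql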
.+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PsxlSgR9Dq +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZlrVmPARFG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PsxlSgR9Dq ++ cat /tmp/tmp.ZlrVmPARFG ++ rm /tmp/tmp.PsxlSgR9Dq /tmp/tmp.ZlrVmPARFG ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HgQ6zmBlzV +++ mktemp ++ local LAST_ERR=/tmp/tmp.TZihez9OJg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HgQ6zmBlzV ++ cat /tmp/tmp.TZihez9OJg ++ rm /tmp/tmp.HgQ6zmBlzV /tmp/tmp.TZihez9OJg ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zRM0MaTNo0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.quMRJCAajK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zRM0MaTNo0 ++ cat /tmp/tmp.quMRJCAajK ++ rm /tmp/tmp.zRM0MaTNo0 /tmp/tmp.quMRJCAajK ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.MHVooJV82R ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.IOa1wyWpuw +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.MHVooJV82R +++++ cat /tmp/tmp.IOa1wyWpuw +++++ rm /tmp/tmp.MHVooJV82R /tmp/tmp.IOa1wyWpuw +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.AlNVXmRvPK ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.yajWYgtrrQ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.AlNVXmRvPK +++++ cat /tmp/tmp.yajWYgtrrQ +++++ rm /tmp/tmp.AlNVXmRvPK /tmp/tmp.yajWYgtrrQ +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.C0KsDC3eSl +++ mktemp ++ local LAST_ERR=/tmp/tmp.0dcy1QNd4B ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.C0KsDC3eSl ++ cat /tmp/tmp.0dcy1QNd4B ++ rm /tmp/tmp.C0KsDC3eSl /tmp/tmp.0dcy1QNd4B ++ return 0 + [[ 2 == \2 ]] + echo + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW 
TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.q1c0JEyZdY +++ mktemp ++ local LAST_ERR=/tmp/tmp.DveUXbe8fE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.q1c0JEyZdY ++ cat /tmp/tmp.DveUXbe8fE ++ rm /tmp/tmp.q1c0JEyZdY /tmp/tmp.DveUXbe8fE ++ return 0 + client_pod=pxc-client-857d976497-x6k8q + wait_pod pxc-client-857d976497-x6k8q + local pod=pxc-client-857d976497-x6k8q + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-x6k8q ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-x6k8q condition met waiting for pod/pxc-client-857d976497-x6k8q to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.UQXTNYaefz/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql /tmp/tmp.UQXTNYaefz/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.08PNB7n2xc ++ mktemp + local LAST_ERR=/tmp/tmp.vIZcY3GJm3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.08PNB7n2xc perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.vIZcY3GJm3 + rm /tmp/tmp.08PNB7n2xc /tmp/tmp.vIZcY3GJm3 + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UmaEuBDUuS +++ mktemp ++ local LAST_ERR=/tmp/tmp.z3zr79zh4M ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ 
'[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UmaEuBDUuS ++ cat /tmp/tmp.z3zr79zh4M ++ rm /tmp/tmp.UmaEuBDUuS /tmp/tmp.z3zr79zh4M ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jkevNSeWTu +++ mktemp ++ local LAST_ERR=/tmp/tmp.EdLwf7AafY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jkevNSeWTu ++ cat /tmp/tmp.EdLwf7AafY ++ rm /tmp/tmp.jkevNSeWTu /tmp/tmp.EdLwf7AafY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4F3RUwaR0j +++ mktemp ++ local LAST_ERR=/tmp/tmp.XE7Be1OfY1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4F3RUwaR0j ++ cat /tmp/tmp.XE7Be1OfY1 ++ rm /tmp/tmp.4F3RUwaR0j /tmp/tmp.XE7Be1OfY1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5nrgM7Dtd1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.VPjpOgBWY7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5nrgM7Dtd1 ++ cat /tmp/tmp.VPjpOgBWY7 ++ rm /tmp/tmp.5nrgM7Dtd1 /tmp/tmp.VPjpOgBWY7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.knWILi5LMz +++ mktemp ++ local LAST_ERR=/tmp/tmp.OfraaIHecW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.knWILi5LMz ++ cat /tmp/tmp.OfraaIHecW ++ rm /tmp/tmp.knWILi5LMz /tmp/tmp.OfraaIHecW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qYX6ntadEy +++ mktemp ++ local LAST_ERR=/tmp/tmp.AP3EKB2gmt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qYX6ntadEy ++ cat /tmp/tmp.AP3EKB2gmt ++ rm /tmp/tmp.qYX6ntadEy /tmp/tmp.AP3EKB2gmt ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sDDAqzGfoA +++ mktemp ++ local LAST_ERR=/tmp/tmp.UHaHrLFR2S ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sDDAqzGfoA ++ cat /tmp/tmp.UHaHrLFR2S ++ rm /tmp/tmp.sDDAqzGfoA /tmp/tmp.UHaHrLFR2S ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
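The repeated iterations above and below are the test's readiness poll: it re-reads .status.state every five seconds until the cluster reports "ready" (bounded by a retry cap of 300), and only then compares the ready replica counts. A condensed sketch of that pattern, using the names visible in the trace (the real wait_cluster_consistency helper and its kubectl_bin wrapper may differ in detail):

  wait_cluster_consistency() {
    local cluster_name=$1 cluster_size=$2 proxy_size=$3
    local i=0 max=300
    until [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.state}') == "ready" ]]; do
      echo -n .
      sleep 5
      [[ $i -ge $max ]] && { echo "pxc/$cluster_name never became ready"; return 1; }
      let i+=1
    done
    # once ready, the status counters must match the expected cluster and proxy sizes
    [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.pxc.ready}') == "$cluster_size" ]] &&
    [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.proxysql.ready}') == "$proxy_size" ]]
  }
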
.+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hUY7ToLu6Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.8IMsy2qtjq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hUY7ToLu6Y ++ cat /tmp/tmp.8IMsy2qtjq ++ rm /tmp/tmp.hUY7ToLu6Y /tmp/tmp.8IMsy2qtjq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 7 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NPNA6noGXh +++ mktemp ++ local LAST_ERR=/tmp/tmp.Di87ujqZel ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NPNA6noGXh ++ cat /tmp/tmp.Di87ujqZel ++ rm /tmp/tmp.NPNA6noGXh /tmp/tmp.Di87ujqZel ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 8 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bPdSjPxnT7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.PXa6HcsqmE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bPdSjPxnT7 ++ cat /tmp/tmp.PXa6HcsqmE ++ rm /tmp/tmp.bPdSjPxnT7 /tmp/tmp.PXa6HcsqmE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 9 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oaJHkgsJu0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.vytDe6d4Wb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oaJHkgsJu0 ++ cat /tmp/tmp.vytDe6d4Wb ++ rm /tmp/tmp.oaJHkgsJu0 /tmp/tmp.vytDe6d4Wb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 10 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.u3hqjeFjWE +++ mktemp ++ local LAST_ERR=/tmp/tmp.9apdWcxoqm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.u3hqjeFjWE ++ cat /tmp/tmp.9apdWcxoqm ++ rm /tmp/tmp.u3hqjeFjWE /tmp/tmp.9apdWcxoqm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 11 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ExS7nc2GDx +++ mktemp ++ local LAST_ERR=/tmp/tmp.tjCnSbIlsY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ExS7nc2GDx ++ cat /tmp/tmp.tjCnSbIlsY ++ rm /tmp/tmp.ExS7nc2GDx /tmp/tmp.tjCnSbIlsY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 12 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gRoh0vfZFO +++ mktemp ++ local LAST_ERR=/tmp/tmp.qWzj6pAeuV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gRoh0vfZFO ++ cat /tmp/tmp.qWzj6pAeuV ++ rm /tmp/tmp.gRoh0vfZFO /tmp/tmp.qWzj6pAeuV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 13 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MNGThTDJya +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZO9qvGpr3y ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MNGThTDJya ++ cat /tmp/tmp.ZO9qvGpr3y ++ rm /tmp/tmp.MNGThTDJya /tmp/tmp.ZO9qvGpr3y ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 14 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gEydPmoT3H +++ mktemp ++ local LAST_ERR=/tmp/tmp.4aTOV29XCG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gEydPmoT3H ++ cat /tmp/tmp.4aTOV29XCG ++ rm /tmp/tmp.gEydPmoT3H /tmp/tmp.4aTOV29XCG ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 15 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7NsimsDNAx +++ mktemp ++ local LAST_ERR=/tmp/tmp.KaemTMVtap ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7NsimsDNAx ++ cat /tmp/tmp.KaemTMVtap ++ rm /tmp/tmp.7NsimsDNAx /tmp/tmp.KaemTMVtap ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 16 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UqGLN4khio +++ mktemp ++ local LAST_ERR=/tmp/tmp.P5bvEsSZD5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UqGLN4khio ++ cat /tmp/tmp.P5bvEsSZD5 ++ rm /tmp/tmp.UqGLN4khio /tmp/tmp.P5bvEsSZD5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
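Alongside the state polls, the trace also resolves which proxy front end the cluster runs (it did so after the earlier wait and does again once this loop exits), so it knows which .status.*.ready counter to compare; in this run that is proxysql.ready. Roughly, and with the same caveat that the real helpers may differ, the pair behaves like:

  get_proxy() {
    local target_cluster=$1
    if [[ $(kubectl get pxc "$target_cluster" -o 'jsonpath={.spec.haproxy.enabled}') == "true" ]]; then
      echo "$target_cluster-haproxy"
    elif [[ $(kubectl get pxc "$target_cluster" -o 'jsonpath={.spec.proxysql.enabled}') == "true" ]]; then
      echo "$target_cluster-proxysql"
    fi
  }

  get_proxy_engine() {
    local cluster_proxy
    cluster_proxy=$(get_proxy "$1")
    echo "${cluster_proxy##*-}"   # "haproxy" or "proxysql"
  }

Here spec.haproxy.enabled is empty and spec.proxysql.enabled is true, so the engine resolves to proxysql and the service name to some-name-proxysql.
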
.+ sleep 5 + [[ 17 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.u6drNLLmX5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.LCOAncCBzp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.u6drNLLmX5 ++ cat /tmp/tmp.LCOAncCBzp ++ rm /tmp/tmp.u6drNLLmX5 /tmp/tmp.LCOAncCBzp ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iuS7avQYFd +++ mktemp ++ local LAST_ERR=/tmp/tmp.SA7wLL8aOH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iuS7avQYFd ++ cat /tmp/tmp.SA7wLL8aOH ++ rm /tmp/tmp.iuS7avQYFd /tmp/tmp.SA7wLL8aOH ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.7nWzqO7PAz ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.nICMJgm42x +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.7nWzqO7PAz +++++ cat /tmp/tmp.nICMJgm42x +++++ rm /tmp/tmp.7nWzqO7PAz /tmp/tmp.nICMJgm42x +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.GUEaiqvY1w ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.HXkfBPQVpC +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.GUEaiqvY1w +++++ cat /tmp/tmp.HXkfBPQVpC +++++ rm /tmp/tmp.GUEaiqvY1w /tmp/tmp.HXkfBPQVpC +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uFa6o7Zvjr +++ mktemp ++ local LAST_ERR=/tmp/tmp.VqV1dk6AUM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uFa6o7Zvjr ++ cat /tmp/tmp.VqV1dk6AUM ++ rm /tmp/tmp.uFa6o7Zvjr /tmp/tmp.VqV1dk6AUM ++ return 0 + [[ 2 == \2 ]] + echo + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.exDGKMxNoC ++ mktemp + local LAST_ERR=/tmp/tmp.rwGz43G8BA + local exit_status=0 ++ 
seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.exDGKMxNoC secret/my-cluster-secrets-2 patched + cat /tmp/tmp.rwGz43G8BA + rm /tmp/tmp.exDGKMxNoC /tmp/tmp.rwGz43G8BA + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ekm6imq144 +++ mktemp ++ local LAST_ERR=/tmp/tmp.lf5PF2bFNk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ekm6imq144 ++ cat /tmp/tmp.lf5PF2bFNk ++ rm /tmp/tmp.ekm6imq144 /tmp/tmp.lf5PF2bFNk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ATfJdrjGZu +++ mktemp ++ local LAST_ERR=/tmp/tmp.jKHM0Ul8Pp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ATfJdrjGZu ++ cat /tmp/tmp.jKHM0Ul8Pp ++ rm /tmp/tmp.ATfJdrjGZu /tmp/tmp.jKHM0Ul8Pp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
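The "test new operator" step above rotates the operator password: the new value is base64-encoded and patched straight into the data section of the second Secret, after which the test sleeps and waits for the operator to reconcile the change. A minimal sketch of that flow, under the same assumption that the helper internals may differ from what the trace shows:

  patch_secret() {
    local secret=$1 key=$2 value=$3   # value is expected to be base64-encoded already
    kubectl patch secret "$secret" -p="{\"data\":{\"$key\": \"$value\"}}"
  }

  newpass=test-password2
  newpassencrypted=$(echo -n "$newpass" | base64)
  patch_secret my-cluster-secrets-2 operator "$newpassencrypted"
  sleep 15
  wait_cluster_consistency some-name 3 2
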
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.W6rCTU3lmL +++ mktemp ++ local LAST_ERR=/tmp/tmp.f2cu40Ek9B ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.W6rCTU3lmL ++ cat /tmp/tmp.f2cu40Ek9B ++ rm /tmp/tmp.W6rCTU3lmL /tmp/tmp.f2cu40Ek9B ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nWXoRqDpO2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZPqp2fZANq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nWXoRqDpO2 ++ cat /tmp/tmp.ZPqp2fZANq ++ rm /tmp/tmp.nWXoRqDpO2 /tmp/tmp.ZPqp2fZANq ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.2nFlU5qEhR ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.pGQIOV3dIm +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.2nFlU5qEhR +++++ cat /tmp/tmp.pGQIOV3dIm +++++ rm /tmp/tmp.2nFlU5qEhR /tmp/tmp.pGQIOV3dIm +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.g1TrCH5az7 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.nOhODOr5ht +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.g1TrCH5az7 +++++ cat /tmp/tmp.nOhODOr5ht +++++ rm /tmp/tmp.g1TrCH5az7 /tmp/tmp.nOhODOr5ht +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FkkznwNK2z +++ mktemp ++ local LAST_ERR=/tmp/tmp.pWI9gatR3h ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FkkznwNK2z ++ cat /tmp/tmp.pWI9gatR3h ++ rm /tmp/tmp.FkkznwNK2z /tmp/tmp.pWI9gatR3h ++ return 0 + [[ 2 == \2 ]] + echo + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW 
TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BgiF9dBK29 +++ mktemp ++ local LAST_ERR=/tmp/tmp.kMjjSJFWSd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BgiF9dBK29 ++ cat /tmp/tmp.kMjjSJFWSd ++ rm /tmp/tmp.BgiF9dBK29 /tmp/tmp.kMjjSJFWSd ++ return 0 + client_pod=pxc-client-857d976497-x6k8q + wait_pod pxc-client-857d976497-x6k8q + local pod=pxc-client-857d976497-x6k8q + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-x6k8q ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-x6k8q condition met waiting for pod/pxc-client-857d976497-x6k8q to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.UQXTNYaefz/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql /tmp/tmp.UQXTNYaefz/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.wlXiFvLzAw +++ mktemp ++ local LAST_ERR=/tmp/tmp.S07SeolKgo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wlXiFvLzAw ++ cat /tmp/tmp.S07SeolKgo ++ rm /tmp/tmp.wlXiFvLzAw /tmp/tmp.S07SeolKgo ++ return 0 + newpass='bBMXAq^$gc_EgOaH{%Gy' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''bBMXAq^$gc_EgOaH{%Gy'\'';' '-h some-name-pxc -uroot -p'\''bBMXAq^$gc_EgOaH{%Gy'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''bBMXAq^$gc_EgOaH{%Gy'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''bBMXAq^$gc_EgOaH{%Gy'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rkv5T4ju62 +++ mktemp ++ local LAST_ERR=/tmp/tmp.re9628zXvr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rkv5T4ju62 ++ cat /tmp/tmp.re9628zXvr ++ rm /tmp/tmp.rkv5T4ju62 /tmp/tmp.re9628zXvr ++ return 0 + client_pod=pxc-client-857d976497-x6k8q + wait_pod pxc-client-857d976497-x6k8q + local pod=pxc-client-857d976497-x6k8q + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-x6k8q ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-857d976497-x6k8q condition met waiting for pod/pxc-client-857d976497-x6k8q to become ReadyDefaulted container "pxc-client" out of: 
pxc-client, backup .Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''bBMXAq^$gc_EgOaH{%Gy'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''bBMXAq^$gc_EgOaH{%Gy'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4-57.sql ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''bBMXAq^$gc_EgOaH{%Gy'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''bBMXAq^$gc_EgOaH{%Gy'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UmGWC8Ws2p +++ mktemp ++ local LAST_ERR=/tmp/tmp.3CTDQd7aOJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UmGWC8Ws2p ++ cat /tmp/tmp.3CTDQd7aOJ ++ rm /tmp/tmp.UmGWC8Ws2p /tmp/tmp.3CTDQd7aOJ ++ return 0 + client_pod=pxc-client-857d976497-x6k8q + wait_pod pxc-client-857d976497-x6k8q + local pod=pxc-client-857d976497-x6k8q + local max_retry=480 + local ns= ++ echo pxc-client-857d976497-x6k8q ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-857d976497-x6k8q condition met waiting for pod/pxc-client-857d976497-x6k8q to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.UQXTNYaefz/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql /tmp/tmp.UQXTNYaefz/select-4.sql
--- /mnt/jenkins/workspace/cloud-pxc-operator_PR-2124/e2e-tests/users/compare/select-4.sql 2025-11-10 14:44:19.359893916 +0000
+++ /tmp/tmp.UQXTNYaefz/select-4.sql 2025-11-10 17:22:28.814641867 +0000
@@ -1,76 +1,2 @@
-CHARACTER_SETS
-CLIENT_STATISTICS
-COLLATIONS
-COLLATION_CHARACTER_SET_APPLICABILITY
-COLUMNS
-COLUMN_PRIVILEGES
-INDEX_STATISTICS
-ENGINES
-EVENTS
-FILES
-GLOBAL_STATUS
-GLOBAL_TEMPORARY_TABLES
-GLOBAL_VARIABLES
-KEY_COLUMN_USAGE
-OPTIMIZER_TRACE
-PARAMETERS
-PARTITIONS
-PLUGINS
-PROCESSLIST
-PROFILING
-REFERENTIAL_CONSTRAINTS
-ROUTINES
-SCHEMATA
-SCHEMA_PRIVILEGES
-SESSION_STATUS
-SESSION_VARIABLES
-STATISTICS
-TABLES
-TABLESPACES
-TABLE_CONSTRAINTS
-TABLE_PRIVILEGES
-TABLE_STATISTICS
-TEMPORARY_TABLES
-THREAD_STATISTICS
-TRIGGERS
-USER_PRIVILEGES
-USER_STATISTICS
-VIEWS
-INNODB_CMP
-INNODB_CMP_RESET
-INNODB_SYS_DATAFILES
-XTRADB_READ_VIEW
-INNODB_SYS_TABLESTATS
-XTRADB_RSEG
-INNODB_BUFFER_PAGE
-INNODB_TRX
-INNODB_CMP_PER_INDEX
-INNODB_METRICS
-INNODB_FT_DELETED
-INNODB_LOCKS
-INNODB_LOCK_WAITS
-XTRADB_INTERNAL_HASH_TABLES
-INNODB_TABLESPACES_ENCRYPTION
-INNODB_CMPMEM_RESET
-INNODB_SYS_FIELDS
-XTRADB_ZIP_DICT
-INNODB_TABLESPACES_SCRUBBING
-INNODB_TEMP_TABLE_INFO
-INNODB_FT_INDEX_TABLE
-INNODB_CMPMEM
-INNODB_SYS_TABLESPACES
-INNODB_CMP_PER_INDEX_RESET
-INNODB_SYS_FOREIGN_COLS
-INNODB_FT_INDEX_CACHE
-INNODB_BUFFER_POOL_STATS
-INNODB_FT_BEING_DELETED
-INNODB_SYS_FOREIGN
-INNODB_BUFFER_PAGE_LRU
-INNODB_FT_DEFAULT_STOPWORD
-INNODB_SYS_TABLES
-INNODB_SYS_COLUMNS
-INNODB_FT_CONFIG
-XTRADB_ZIP_DICT_COLS
-INNODB_SYS_INDEXES
-INNODB_SYS_VIRTUAL
-INNODB_CHANGED_PAGES
+ERROR 1045 (28000): ProxySQL Error: Access denied for user 'testsync'@'10.129.26.70' (using password: YES)
+command terminated with exit code 1
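The diff above is where this check fails: instead of the expected 5.7 information_schema table list, the query routed through ProxySQL as the freshly created testsync user returned ERROR 1045, which suggests the new user had not yet been synced into ProxySQL when the comparison ran. The check itself amounts to reading the root password out of the Secret, running the statement from the client pod, and diffing the captured output against the stored expectation; a rough sketch, where the exec invocation is an assumption about what run_mysql does internally:

  getSecretData() {
    local secretName=$1 dataKey=$2
    kubectl get "secrets/$secretName" "--template={{.data.$dataKey}}" | base64 --decode
  }

  root_pass=$(getSecretData my-cluster-secrets-2 root)   # testsync was created with this same password
  # $client_pod is the pxc-client pod selected earlier in the trace
  kubectl exec "$client_pod" -- mysql -sN -h some-name-proxysql -utestsync -p"$root_pass" \
    -e 'SHOW TABLES;' >/tmp/select-4.sql 2>&1 || true
  diff -u e2e-tests/users/compare/select-4.sql /tmp/select-4.sql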