Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/logs/custom-users-roles-sharded.log Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1 + cluster=some-name + create_infra custom-users-roles-sharded-14882 + local ns=custom-users-roles-sharded-14882 + [[ 1 == 1 ]] + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.9dWanOq6YT ++ mktemp + local LAST_ERR=/tmp/tmp.OxF3erSRii + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9dWanOq6YT customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.OxF3erSRii + rm /tmp/tmp.9dWanOq6YT /tmp/tmp.OxF3erSRii + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/deploy/crd.yaml ++ grep -v '\-\-\-' grep: warning: stray \ before - grep: warning: stray \ before - + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.SSxPhO0XQf ++ mktemp + local LAST_ERR=/tmp/tmp.P72yNg7Gwy + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.SSxPhO0XQf + cat /tmp/tmp.P72yNg7Gwy + rm /tmp/tmp.SSxPhO0XQf /tmp/tmp.P72yNg7Gwy + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p 
'{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.34fRWi0Si5 ++ mktemp + local LAST_ERR=/tmp/tmp.hh782PbvgQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.34fRWi0Si5 + cat /tmp/tmp.hh782PbvgQ + rm /tmp/tmp.34fRWi0Si5 /tmp/tmp.hh782PbvgQ + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.5HChUwhIKT ++ mktemp + local LAST_ERR=/tmp/tmp.dGI8Vp0U7C + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.5HChUwhIKT + cat /tmp/tmp.dGI8Vp0U7C + rm /tmp/tmp.5HChUwhIKT /tmp/tmp.dGI8Vp0U7C + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.7FHNm1G8Jh ++ mktemp + local LAST_ERR=/tmp/tmp.kU70nVXpfD + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.7FHNm1G8Jh clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.kU70nVXpfD + rm /tmp/tmp.7FHNm1G8Jh /tmp/tmp.kU70nVXpfD + return 0 + check_crd_for_deletion PR-2283-73ca3795 + local git_tag=PR-2283-73ca3795 ++ curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/PR-2283-73ca3795/deploy/crd.yaml ++ yq eval .metadata.name ++ /usr/sbin/sed s/---//g ++ /usr/sbin/sed ':a;N;$!ba;s/\n/ /g' + for crd_name in $(curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml | yq eval '.metadata.name' | $sed 's/---//g' | $sed ':a;N;$!ba;s/\n/ /g') ++ kubectl_bin get crd/null -o 'jsonpath={.status.conditions[-1].type}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1BZNDQhSvW +++ mktemp ++ local LAST_ERR=/tmp/tmp.M9zMJhl61s ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.1BZNDQhSvW ++ cat /tmp/tmp.M9zMJhl61s Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ 
sleep 0 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.1BZNDQhSvW ++ cat /tmp/tmp.M9zMJhl61s Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 4 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.1BZNDQhSvW ++ cat /tmp/tmp.M9zMJhl61s Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 8 ++ cat /tmp/tmp.1BZNDQhSvW ++ cat /tmp/tmp.M9zMJhl61s Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ rm /tmp/tmp.1BZNDQhSvW /tmp/tmp.M9zMJhl61s ++ return 1 + [[ '' == Terminating ]] + '[' -n psmdb-operator ']' + create_namespace psmdb-operator + local namespace=psmdb-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + '[' -n '' ']' + xargs kubectl delete ns + desc 'cleaned up old namespaces psmdb-operator' + set +o xtrace ++ mktemp ----------------------------------------------------------------------------------- cleaned up old namespaces psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace psmdb-operator --ignore-not-found ++ mktemp + grep -E -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + local LAST_OUT=/tmp/tmp.aYZpo8DaYq + local LAST_OUT=/tmp/tmp.hsPTOKJMCf ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.LFOSvEEvdw + local 
exit_status=0 + local timeout=4 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.8Wd0wNuwTC + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get ns + for i in $(seq 0 2) + set +e + kubectl delete namespace psmdb-operator --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.aYZpo8DaYq + cat /tmp/tmp.LFOSvEEvdw + rm /tmp/tmp.aYZpo8DaYq /tmp/tmp.LFOSvEEvdw + return 0 namespace "custom-users-roles-sharded-27119" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.hsPTOKJMCf namespace "psmdb-operator" deleted + cat /tmp/tmp.8Wd0wNuwTC + rm /tmp/tmp.hsPTOKJMCf /tmp/tmp.8Wd0wNuwTC + return 0 + kubectl_bin wait --for=delete namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.eHmcHJh1HD ++ mktemp + local LAST_ERR=/tmp/tmp.ZZUMv3kXSt + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.eHmcHJh1HD + cat /tmp/tmp.ZZUMv3kXSt + rm /tmp/tmp.eHmcHJh1HD /tmp/tmp.ZZUMv3kXSt + return 0 + desc 'create namespace psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.AMwYkR9aG8 ++ mktemp + local LAST_ERR=/tmp/tmp.WsxlGBsLys + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.AMwYkR9aG8 namespace/psmdb-operator created + cat /tmp/tmp.WsxlGBsLys + rm /tmp/tmp.AMwYkR9aG8 /tmp/tmp.WsxlGBsLys + return 0 + set_kube_ctx psmdb-operator + local namespace=psmdb-operator ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.dD45gfLdJ7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.nY7MWgcGCO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dD45gfLdJ7 ++ cat /tmp/tmp.nY7MWgcGCO ++ rm /tmp/tmp.dD45gfLdJ7 /tmp/tmp.nY7MWgcGCO ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2283-73ca3795-11-cluster7 --namespace=psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.acuBTGCbPm ++ mktemp + local LAST_ERR=/tmp/tmp.vZInUxfl1G + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2283-73ca3795-11-cluster7 --namespace=psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.acuBTGCbPm Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-2283-73ca3795-11-cluster7" modified. 
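
Note: every kubectl call traced in this log runs through the suite's kubectl_bin wrapper, which buffers stdout/stderr in mktemp files and retries up to three times with a growing back-off (the sleep 0 / sleep 4 / sleep 8 visible above when `crd/null` was not found). A minimal bash reconstruction inferred from this trace; the real helper lives in the suite's shared functions file and may differ in detail:

    # sketch reconstructed from the xtrace output above, not the actual source
    kubectl_bin() {
        local LAST_OUT LAST_ERR
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        local exit_status=0
        local timeout=4
        for i in $(seq 0 2); do
            set +e                                 # tolerate failure so we can retry
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" != 0 ]; then
                cat "$LAST_OUT" "$LAST_ERR"        # surface the failed attempt
                sleep $((timeout * i))             # 0s, 4s, 8s between attempts
            else
                break
            fi
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR"
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }
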
+ cat /tmp/tmp.vZInUxfl1G + rm /tmp/tmp.acuBTGCbPm /tmp/tmp.vZInUxfl1G + return 0 + deploy_operator + desc 'start PSMDB operator: docker.io/perconalab/percona-server-mongodb-operator:PR-2283-73ca3795' + set +o xtrace ----------------------------------------------------------------------------------- start PSMDB operator: docker.io/perconalab/percona-server-mongodb-operator:PR-2283-73ca3795 ----------------------------------------------------------------------------------- + local cr_file + '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/conf/crd.yaml ']' + cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/deploy/crd.yaml + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.utk5CtAQwV ++ mktemp + local LAST_ERR=/tmp/tmp.oEislMrRBb + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.utk5CtAQwV customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied + cat /tmp/tmp.oEislMrRBb + rm /tmp/tmp.utk5CtAQwV /tmp/tmp.oEislMrRBb + return 0 + '[' -n psmdb-operator ']' + apply_rbac cw-rbac + local operator_namespace=psmdb-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: psmdb-operator^' + kubectl_bin apply -n psmdb-operator -f - ++ mktemp + local LAST_OUT=/tmp/tmp.re8OW2b3Bt ++ mktemp + local LAST_ERR=/tmp/tmp.3jo89fq0P9 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.re8OW2b3Bt clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created + cat /tmp/tmp.3jo89fq0P9 + rm /tmp/tmp.re8OW2b3Bt /tmp/tmp.3jo89fq0P9 + return 0 + yq eval $'\n\t\t\t(.spec.template.spec.containers[].image = "docker.io/perconalab/percona-server-mongodb-operator:PR-2283-73ca3795") |\n\t\t\t((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") |\n\t\t\t((.. 
| select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/deploy/cw-operator.yaml + kubectl_bin apply -n psmdb-operator -f - ++ mktemp + local LAST_OUT=/tmp/tmp.ajSA43MNhK ++ mktemp + local LAST_ERR=/tmp/tmp.yo3L6ZXUmQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ajSA43MNhK deployment.apps/percona-server-mongodb-operator created + cat /tmp/tmp.yo3L6ZXUmQ + rm /tmp/tmp.ajSA43MNhK /tmp/tmp.yo3L6ZXUmQ + return 0 + sleep 20 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.hlcbTCCtuM +++ mktemp ++ local LAST_ERR=/tmp/tmp.xFH2aGtXiT ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hlcbTCCtuM ++ cat /tmp/tmp.xFH2aGtXiT ++ rm /tmp/tmp.hlcbTCCtuM /tmp/tmp.xFH2aGtXiT ++ return 0 + wait_operator_pod percona-server-mongodb-operator-79d56c85d6-rvgfk + local pod=percona-server-mongodb-operator-79d56c85d6-rvgfk + set +o xtrace waiting for pod/percona-server-mongodb-operator-79d56c85d6-rvgfk to be ready.OK + echo 'Print operator info from log' Print operator info from log + grep 'Manager starting up' ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.1yXtoH3qlH +++ mktemp ++ local LAST_ERR=/tmp/tmp.frmuPoOSqv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.1yXtoH3qlH ++ cat /tmp/tmp.frmuPoOSqv ++ rm /tmp/tmp.1yXtoH3qlH /tmp/tmp.frmuPoOSqv ++ return 0 + kubectl_bin logs -n psmdb-operator percona-server-mongodb-operator-79d56c85d6-rvgfk ++ mktemp + local LAST_OUT=/tmp/tmp.FRKMWhABMc ++ mktemp + local LAST_ERR=/tmp/tmp.ytbg2chkJJ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl logs -n psmdb-operator percona-server-mongodb-operator-79d56c85d6-rvgfk + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.FRKMWhABMc + cat /tmp/tmp.ytbg2chkJJ + rm /tmp/tmp.FRKMWhABMc /tmp/tmp.ytbg2chkJJ + return 0 2026-04-09T15:29:11.619Z INFO setup Manager starting up {"gitCommit": "73ca37956356b4f3579a3ef1dce0c4105fda2da2", "gitBranch": "PR-2283-73ca3795", "buildTime": "", "goVersion": "go1.25.9", "os": "linux", "arch": "amd64"} + create_namespace custom-users-roles-sharded-14882 + local namespace=custom-users-roles-sharded-14882 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ 
grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + grep -E -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces custom-users-roles-sharded-14882' + set +o xtrace ++ mktemp ----------------------------------------------------------------------------------- cleaned up old namespaces custom-users-roles-sharded-14882 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace custom-users-roles-sharded-14882 --ignore-not-found + xargs kubectl delete ns ++ mktemp + local LAST_OUT=/tmp/tmp.Vj3PZ7KcPT ++ mktemp + local LAST_OUT=/tmp/tmp.XpFR2dcVah + local LAST_ERR=/tmp/tmp.FFCycBGW0u ++ mktemp + local exit_status=0 + local timeout=4 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.bIkzRiXKDF + local exit_status=0 + local timeout=4 + for i in $(seq 0 2) + set +e + kubectl get ns ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete namespace custom-users-roles-sharded-14882 --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.XpFR2dcVah + cat /tmp/tmp.bIkzRiXKDF + rm /tmp/tmp.XpFR2dcVah /tmp/tmp.bIkzRiXKDF + return 0 + kubectl_bin wait --for=delete namespace custom-users-roles-sharded-14882 ++ mktemp + local LAST_OUT=/tmp/tmp.wJH1oSEuX4 ++ mktemp + local LAST_ERR=/tmp/tmp.T2jBxOML1k + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete namespace custom-users-roles-sharded-14882 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Vj3PZ7KcPT + cat /tmp/tmp.FFCycBGW0u + rm /tmp/tmp.Vj3PZ7KcPT /tmp/tmp.FFCycBGW0u + return 0 error: resource(s) were provided, but no name was specified + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.wJH1oSEuX4 + cat /tmp/tmp.T2jBxOML1k + rm /tmp/tmp.wJH1oSEuX4 /tmp/tmp.T2jBxOML1k + return 0 + desc 'create namespace custom-users-roles-sharded-14882' + set +o xtrace ----------------------------------------------------------------------------------- create namespace custom-users-roles-sharded-14882 
----------------------------------------------------------------------------------- + kubectl_bin create namespace custom-users-roles-sharded-14882 ++ mktemp + local LAST_OUT=/tmp/tmp.EkPnDBr2bn ++ mktemp + local LAST_ERR=/tmp/tmp.nSZL96z2VG + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace custom-users-roles-sharded-14882 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.EkPnDBr2bn namespace/custom-users-roles-sharded-14882 created + cat /tmp/tmp.nSZL96z2VG + rm /tmp/tmp.EkPnDBr2bn /tmp/tmp.nSZL96z2VG + return 0 + set_kube_ctx custom-users-roles-sharded-14882 + local namespace=custom-users-roles-sharded-14882 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.7Y1nAcXZYn +++ mktemp ++ local LAST_ERR=/tmp/tmp.3PRmtZa7v1 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.7Y1nAcXZYn ++ cat /tmp/tmp.3PRmtZa7v1 ++ rm /tmp/tmp.7Y1nAcXZYn /tmp/tmp.3PRmtZa7v1 ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2283-73ca3795-11-cluster7 --namespace=custom-users-roles-sharded-14882 ++ mktemp + local LAST_OUT=/tmp/tmp.auduwoQCbf ++ mktemp + local LAST_ERR=/tmp/tmp.uRukgHz7yN + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2283-73ca3795-11-cluster7 --namespace=custom-users-roles-sharded-14882 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.auduwoQCbf Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-2283-73ca3795-11-cluster7" modified. 
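
Note: the repeated "error: resource(s) were provided, but no name was specified" lines in this log are benign. The cleanup steps (chaos-mesh teardown, old-namespace removal) pipe a grep-filtered object list into kubectl delete via xargs; when the list is empty, kubectl is invoked with a resource type but no names, and the script discards the failure with `|| :` (traced as `+ :`). Likewise, the earlier "grep: warning: stray \ before -" lines come from `grep -v '\-\-\-'`, where the backslash-escaped hyphens are unnecessary. A sketch of the namespace-cleanup pattern; the `-r` flag is GNU xargs' no-run-if-empty option, shown here as the usual way to silence the empty-list error rather than as what the suite actually does:

    # delete leftover test namespaces; system namespaces are filtered out
    kubectl get ns \
        | grep -E -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME' \
        | awk '{print $1}' \
        | xargs -r kubectl delete ns   # -r: skip kubectl entirely when the list is empty
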
+ cat /tmp/tmp.uRukgHz7yN + rm /tmp/tmp.auduwoQCbf /tmp/tmp.uRukgHz7yN + return 0 + mongosUri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + desc 'create secrets and start client' + set +o xtrace ----------------------------------------------------------------------------------- create secrets and start client ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/conf/client.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/conf/app-user-secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.iak96WEB0c ++ mktemp + local LAST_ERR=/tmp/tmp.HPSLrBbVDV + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/conf/client.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/conf/app-user-secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.iak96WEB0c deployment.apps/psmdb-client created secret/some-users created secret/user-one created secret/user-two created + cat /tmp/tmp.HPSLrBbVDV + rm /tmp/tmp.iak96WEB0c /tmp/tmp.HPSLrBbVDV + return 0 + apply_s3_storage_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.qbMn5yf7br ++ mktemp + local LAST_ERR=/tmp/tmp.rVlUwSsEib + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.qbMn5yf7br secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created secret/gcp-cs-sa-key-secret created + cat /tmp/tmp.rVlUwSsEib + rm /tmp/tmp.qbMn5yf7br /tmp/tmp.rVlUwSsEib + return 0 + version_gt 1.19 ++ echo '1.32 >= 1.19' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' 0 -ne 1 ']' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/conf/container-rc.yaml + /usr/sbin/sed s/docker/runc/g + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.W00FqfCjtr ++ mktemp + local LAST_ERR=/tmp/tmp.3t3rPP9Hfv + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.W00FqfCjtr runtimeclass.node.k8s.io/container-rc unchanged + cat /tmp/tmp.3t3rPP9Hfv + rm /tmp/tmp.W00FqfCjtr /tmp/tmp.3t3rPP9Hfv + return 0 + desc 'create first PSMDB cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PSMDB cluster 
----------------------------------------------------------------------------------- + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/conf/some-name-rs0.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/conf/some-name-rs0.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/conf/some-name-rs0.yml + yq eval '(.spec | select(.image == null)).image = "docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0"' + yq eval '(.spec | select(has("pmm"))).pmm.image = "docker.io/percona/pmm-client:2.44.1-1"' + yq eval '(.spec | select(has("initImage"))).initImage = "docker.io/perconalab/percona-server-mongodb-operator:PR-2283-73ca3795"' ++ mktemp + /usr/sbin/sed -e s/NAME_SPACE/custom-users-roles-sharded-14882/g + yq eval '(.spec | select(has("backup"))).backup.image = "docker.io/perconalab/percona-server-mongodb-operator:main-backup"' + local LAST_OUT=/tmp/tmp.3XxRD8OftR ++ mktemp + yq eval '.spec.upgradeOptions.apply="Never"' + local LAST_ERR=/tmp/tmp.nLkKQySqDq + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.3XxRD8OftR perconaservermongodb.psmdb.percona.com/some-name created + cat /tmp/tmp.nLkKQySqDq + rm /tmp/tmp.3XxRD8OftR /tmp/tmp.nLkKQySqDq + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready............OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready............OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Yi1VJk62yc +++ mktemp ++ local LAST_ERR=/tmp/tmp.QV5dNp74OI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Yi1VJk62yc ++ cat /tmp/tmp.QV5dNp74OI ++ rm /tmp/tmp.Yi1VJk62yc /tmp/tmp.QV5dNp74OI ++ return 0 + [[ '' == true ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready............OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5bS6JQIDyW +++ mktemp ++ local LAST_ERR=/tmp/tmp.iAXW9gSEwn ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.5bS6JQIDyW ++ 
cat /tmp/tmp.iAXW9gSEwn ++ rm /tmp/tmp.5bS6JQIDyW /tmp/tmp.iAXW9gSEwn ++ return 0 + [[ '' == true ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Z3W0PCz8Nc +++ mktemp ++ local LAST_ERR=/tmp/tmp.nQFtgDo1vc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Z3W0PCz8Nc ++ cat /tmp/tmp.nQFtgDo1vc ++ rm /tmp/tmp.Z3W0PCz8Nc /tmp/tmp.nQFtgDo1vc ++ return 0 + [[ '' == true ]] + sleep 10 + [[ true == true ]] + set +x Waiting for cluster readyness....................... + wait_for_running some-name-cfg 3 false + local name=some-name-cfg + let last_pod=2 + local check_cluster_readyness=false + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=cfg + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-cfg-0 + local pod=some-name-cfg-0 + set +o xtrace waiting for pod/some-name-cfg-0 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-cfg-1 + local pod=some-name-cfg-1 + set +o xtrace waiting for pod/some-name-cfg-1 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mxKrZUsiDi +++ mktemp ++ local LAST_ERR=/tmp/tmp.vBrKepqRWR ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.mxKrZUsiDi ++ cat /tmp/tmp.vBrKepqRWR ++ rm /tmp/tmp.mxKrZUsiDi /tmp/tmp.vBrKepqRWR ++ return 0 + [[ '' == true ]] + wait_pod some-name-cfg-2 + local pod=some-name-cfg-2 + set +o xtrace waiting for pod/some-name-cfg-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dMbZHDRLdH +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ati2Q1A6ia ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dMbZHDRLdH ++ cat /tmp/tmp.Ati2Q1A6ia ++ rm /tmp/tmp.dMbZHDRLdH /tmp/tmp.Ati2Q1A6ia ++ return 0 + [[ '' == true ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PVnLYpHw5Z +++ mktemp ++ local LAST_ERR=/tmp/tmp.URyMNb0cVG ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.PVnLYpHw5Z ++ cat /tmp/tmp.URyMNb0cVG ++ rm /tmp/tmp.PVnLYpHw5Z /tmp/tmp.URyMNb0cVG ++ return 0 + [[ '' == true ]] + sleep 10 + [[ false == true ]] + wait_for_running some-name-mongos 3 + local name=some-name-mongos + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=mongos + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod 
some-name-mongos-0 + local pod=some-name-mongos-0 + set +o xtrace waiting for pod/some-name-mongos-0 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-mongos-1 + local pod=some-name-mongos-1 + set +o xtrace waiting for pod/some-name-mongos-1 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iRUmlvkoER +++ mktemp ++ local LAST_ERR=/tmp/tmp.8WtF6Hukkc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.iRUmlvkoER ++ cat /tmp/tmp.8WtF6Hukkc ++ rm /tmp/tmp.iRUmlvkoER /tmp/tmp.8WtF6Hukkc ++ return 0 + [[ '' == true ]] + wait_pod some-name-mongos-2 + local pod=some-name-mongos-2 + set +o xtrace waiting for pod/some-name-mongos-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vMYJf8QZRx +++ mktemp ++ local LAST_ERR=/tmp/tmp.a9ydA8Yk8X ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.vMYJf8QZRx ++ cat /tmp/tmp.a9ydA8Yk8X ++ rm /tmp/tmp.vMYJf8QZRx /tmp/tmp.a9ydA8Yk8X ++ return 0 + [[ '' == true ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9Fod68ByuL +++ mktemp ++ local LAST_ERR=/tmp/tmp.PhieRszDwq ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.9Fod68ByuL ++ cat /tmp/tmp.PhieRszDwq ++ rm /tmp/tmp.9Fod68ByuL /tmp/tmp.PhieRszDwq ++ return 0 + [[ '' == true ]] + sleep 10 + [[ true == true ]] + set +x Waiting for cluster readyness + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BZyGTgySSC +++ mktemp ++ local LAST_ERR=/tmp/tmp.V2X8DwQZTL ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.BZyGTgySSC ++ cat /tmp/tmp.V2X8DwQZTL ++ rm /tmp/tmp.BZyGTgySSC /tmp/tmp.V2X8DwQZTL ++ return 0 + [[ ready == ready ]] + echo .OK .OK + desc 'check if service and statefulset created with expected config' + set +o xtrace ----------------------------------------------------------------------------------- check if service and statefulset created with expected config ----------------------------------------------------------------------------------- + compare_kubectl statefulset/some-name-rs0 + local resource=statefulset/some-name-rs0 + local postfix= + local skip_generation_check= + local 
expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/statefulset_some-name-rs0.yml + local new_result=/tmp/tmp.UfAOPg7iAD/statefulset_some-name-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/statefulset_some-name-rs0-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-rs0 + yq eval $'\n\t\t\tdel(.metadata.ownerReferences[].apiVersion) |\n\t\t\tdel(.metadata.managedFields) |\n\t\t\tdel(.. | select(has("creationTimestamp")).creationTimestamp) |\n\t\t\tdel(.. | select(has("namespace")).namespace) |\n\t\t\tdel(.. | select(has("uid")).uid) |\n\t\t\tdel(.metadata.resourceVersion) |\n\t\t\tdel(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) |\n\t\t\tdel(.metadata.selfLink) |\n\t\t\tdel(.metadata.annotations."cloud.google.com/neg") |\n\t\t\tdel(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") |\n\t\t\tdel(.. | select(has("image")).image) |\n\t\t\tdel(.. | select(has("clusterIP")).clusterIP) |\n\t\t\tdel(.. | select(has("clusterIPs")).clusterIPs) |\n\t\t\tdel(.. | select(has("dataSource")).dataSource) |\n\t\t\tdel(.. | select(has("procMount")).procMount) |\n\t\t\tdel(.. | select(has("storageClassName")).storageClassName) |\n\t\t\tdel(.. | select(has("finalizers")).finalizers) |\n\t\t\tdel(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") |\n\t\t\tdel(.. | select(has("volumeName")).volumeName) |\n\t\t\tdel(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") |\n\t\t\tdel(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") |\n\t\t\tdel(.spec.volumeMode) |\n\t\t\tdel(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") |\n\t\t\tdel(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") |\n\t\t\tdel(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") |\n\t\t\tdel(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") |\n\t\t\tdel(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") |\n\t\t\tdel(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) |\n\t\t\tdel(.. | select(has("healthCheckNodePort")).healthCheckNodePort) |\n\t\t\tdel(.. | select(has("nodePort")).nodePort) |\n\t\t\tdel(.status) |\n\t\t\t(.. | select(tag == "!!str")) |= sub("custom-users-roles-sharded-14882", "NAME_SPACE") |\n\t\t\tdel(.spec.volumeClaimTemplates[].apiVersion) |\n\t\t\tdel(.spec.volumeClaimTemplates[].kind) |\n\t\t\tdel(.spec.ipFamilies) |\n\t\t\tdel(.spec.ipFamilyPolicy) |\n\t\t\t(.. | select(. == "extensions/v1beta1")) = "apps/v1" |\n\t\t\t(.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.LLPhQTkgng ++ mktemp + local LAST_ERR=/tmp/tmp.4iGobzVecw + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/some-name-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LLPhQTkgng + cat /tmp/tmp.4iGobzVecw + rm /tmp/tmp.LLPhQTkgng /tmp/tmp.4iGobzVecw + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.UfAOPg7iAD/statefulset_some-name-rs0.yml + version_gt 1.22 ++ echo '1.32 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.UfAOPg7iAD/statefulset_some-name-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.UfAOPg7iAD/statefulset_some-name-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/statefulset_some-name-rs0.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/statefulset_some-name-rs0.yml /tmp/tmp.UfAOPg7iAD/statefulset_some-name-rs0.yml + log 'compare_kubectl: statefulset/some-name-rs0 OK' + set +o xtrace [2026-04-09T15:33:11+0000] compare_kubectl: statefulset/some-name-rs0 OK + compare_kubectl statefulset/some-name-cfg + local resource=statefulset/some-name-cfg + local postfix= + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/statefulset_some-name-cfg.yml + local new_result=/tmp/tmp.UfAOPg7iAD/statefulset_some-name-cfg.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/statefulset_some-name-cfg-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-cfg + yq eval $'\n\t\t\tdel(.metadata.ownerReferences[].apiVersion) |\n\t\t\tdel(.metadata.managedFields) |\n\t\t\tdel(.. | select(has("creationTimestamp")).creationTimestamp) |\n\t\t\tdel(.. | select(has("namespace")).namespace) |\n\t\t\tdel(.. | select(has("uid")).uid) |\n\t\t\tdel(.metadata.resourceVersion) |\n\t\t\tdel(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) |\n\t\t\tdel(.metadata.selfLink) |\n\t\t\tdel(.metadata.annotations."cloud.google.com/neg") |\n\t\t\tdel(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") |\n\t\t\tdel(.. | select(has("image")).image) |\n\t\t\tdel(.. | select(has("clusterIP")).clusterIP) |\n\t\t\tdel(.. | select(has("clusterIPs")).clusterIPs) |\n\t\t\tdel(.. | select(has("dataSource")).dataSource) |\n\t\t\tdel(.. | select(has("procMount")).procMount) |\n\t\t\tdel(.. | select(has("storageClassName")).storageClassName) |\n\t\t\tdel(.. | select(has("finalizers")).finalizers) |\n\t\t\tdel(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") |\n\t\t\tdel(.. | select(has("volumeName")).volumeName) |\n\t\t\tdel(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") |\n\t\t\tdel(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") |\n\t\t\tdel(.spec.volumeMode) |\n\t\t\tdel(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") |\n\t\t\tdel(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") |\n\t\t\tdel(.. 
| select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") |\n\t\t\tdel(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") |\n\t\t\tdel(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") |\n\t\t\tdel(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) |\n\t\t\tdel(.. | select(has("healthCheckNodePort")).healthCheckNodePort) |\n\t\t\tdel(.. | select(has("nodePort")).nodePort) |\n\t\t\tdel(.status) |\n\t\t\t(.. | select(tag == "!!str")) |= sub("custom-users-roles-sharded-14882", "NAME_SPACE") |\n\t\t\tdel(.spec.volumeClaimTemplates[].apiVersion) |\n\t\t\tdel(.spec.volumeClaimTemplates[].kind) |\n\t\t\tdel(.spec.ipFamilies) |\n\t\t\tdel(.spec.ipFamilyPolicy) |\n\t\t\t(.. | select(. == "extensions/v1beta1")) = "apps/v1" |\n\t\t\t(.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.aLIrhdvz31 ++ mktemp + local LAST_ERR=/tmp/tmp.dEDuOMBxa1 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/some-name-cfg + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.aLIrhdvz31 + cat /tmp/tmp.dEDuOMBxa1 + rm /tmp/tmp.aLIrhdvz31 /tmp/tmp.dEDuOMBxa1 + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.UfAOPg7iAD/statefulset_some-name-cfg.yml + version_gt 1.22 ++ echo '1.32 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.UfAOPg7iAD/statefulset_some-name-cfg.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.UfAOPg7iAD/statefulset_some-name-cfg.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/statefulset_some-name-cfg.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/statefulset_some-name-cfg.yml /tmp/tmp.UfAOPg7iAD/statefulset_some-name-cfg.yml + log 'compare_kubectl: statefulset/some-name-cfg OK' + set +o xtrace [2026-04-09T15:33:12+0000] compare_kubectl: statefulset/some-name-cfg OK + compare_kubectl statefulset/some-name-mongos '' + local resource=statefulset/some-name-mongos + local postfix= + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/statefulset_some-name-mongos.yml + local new_result=/tmp/tmp.UfAOPg7iAD/statefulset_some-name-mongos.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/statefulset_some-name-mongos-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-mongos + yq eval $'\n\t\t\tdel(.metadata.ownerReferences[].apiVersion) |\n\t\t\tdel(.metadata.managedFields) |\n\t\t\tdel(.. | select(has("creationTimestamp")).creationTimestamp) |\n\t\t\tdel(.. | select(has("namespace")).namespace) |\n\t\t\tdel(.. | select(has("uid")).uid) |\n\t\t\tdel(.metadata.resourceVersion) |\n\t\t\tdel(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) |\n\t\t\tdel(.metadata.selfLink) |\n\t\t\tdel(.metadata.annotations."cloud.google.com/neg") |\n\t\t\tdel(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") |\n\t\t\tdel(.. | select(has("image")).image) |\n\t\t\tdel(.. | select(has("clusterIP")).clusterIP) |\n\t\t\tdel(.. | select(has("clusterIPs")).clusterIPs) |\n\t\t\tdel(.. 
| select(has("dataSource")).dataSource) |\n\t\t\tdel(.. | select(has("procMount")).procMount) |\n\t\t\tdel(.. | select(has("storageClassName")).storageClassName) |\n\t\t\tdel(.. | select(has("finalizers")).finalizers) |\n\t\t\tdel(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") |\n\t\t\tdel(.. | select(has("volumeName")).volumeName) |\n\t\t\tdel(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") |\n\t\t\tdel(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") |\n\t\t\tdel(.spec.volumeMode) |\n\t\t\tdel(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") |\n\t\t\tdel(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") |\n\t\t\tdel(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") |\n\t\t\tdel(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") |\n\t\t\tdel(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") |\n\t\t\tdel(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) |\n\t\t\tdel(.. | select(has("healthCheckNodePort")).healthCheckNodePort) |\n\t\t\tdel(.. | select(has("nodePort")).nodePort) |\n\t\t\tdel(.status) |\n\t\t\t(.. | select(tag == "!!str")) |= sub("custom-users-roles-sharded-14882", "NAME_SPACE") |\n\t\t\tdel(.spec.volumeClaimTemplates[].apiVersion) |\n\t\t\tdel(.spec.volumeClaimTemplates[].kind) |\n\t\t\tdel(.spec.ipFamilies) |\n\t\t\tdel(.spec.ipFamilyPolicy) |\n\t\t\t(.. | select(. == "extensions/v1beta1")) = "apps/v1" |\n\t\t\t(.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.6kGv0XrGb8 ++ mktemp + local LAST_ERR=/tmp/tmp.IDHLxUZxkz + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/some-name-mongos + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6kGv0XrGb8 + cat /tmp/tmp.IDHLxUZxkz + rm /tmp/tmp.6kGv0XrGb8 /tmp/tmp.IDHLxUZxkz + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.UfAOPg7iAD/statefulset_some-name-mongos.yml + version_gt 1.22 ++ echo '1.32 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.UfAOPg7iAD/statefulset_some-name-mongos.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.UfAOPg7iAD/statefulset_some-name-mongos.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/statefulset_some-name-mongos.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/statefulset_some-name-mongos.yml /tmp/tmp.UfAOPg7iAD/statefulset_some-name-mongos.yml + log 'compare_kubectl: statefulset/some-name-mongos OK' + set +o xtrace [2026-04-09T15:33:14+0000] compare_kubectl: statefulset/some-name-mongos OK + desc 'check user created on cluster creation' + set +o xtrace ----------------------------------------------------------------------------------- check user created on cluster creation ----------------------------------------------------------------------------------- + userOne=user-one ++ getSecretData user-one userOnePassKey ++ local secretName=user-one ++ local dataKey=userOnePassKey +++ kubectl get secrets/user-one '--template={{.data.userOnePassKey}}' +++ base64 -d ++ local 
data=clusterMonitor ++ echo clusterMonitor + userOnePass=clusterMonitor ++ get_user_cmd '"user-one"' ++ local 'user="user-one"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-one");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-one");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare admin $'(function() {\n\tvar user = db.getUser("user-one");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 user-one + local database=admin + local $'command=(function() {\n\tvar user = db.getUser("user-one");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local target=user-one + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar user = db.getUser("user-one");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 mongodb + local $'command=use admin\\n (function() {\n\tvar user = db.getUser("user-one");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + sed '/"userId"/d' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7NdkRnA0hu +++ mktemp ++ local LAST_ERR=/tmp/tmp.pspX2A8usA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.7NdkRnA0hu ++ cat /tmp/tmp.pspX2A8usA ++ rm /tmp/tmp.7NdkRnA0hu /tmp/tmp.pspX2A8usA ++ return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-one");\n\tvar roles = user.roles;\n\troles.sort((a, b) 
=> {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.aqW20t6CfN ++ mktemp + local LAST_ERR=/tmp/tmp.xtiymdaE3a + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-one");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.aqW20t6CfN + cat /tmp/tmp.xtiymdaE3a + rm /tmp/tmp.aqW20t6CfN /tmp/tmp.xtiymdaE3a + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/user-one.json /tmp/tmp.UfAOPg7iAD/user-one + check_auth user-one:clusterMonitor@some-name-mongos.custom-users-roles-sharded-14882 + local uri=user-one:clusterMonitor@some-name-mongos.custom-users-roles-sharded-14882 ++ run_mongos 'db.runCommand({ ping: 1 }).ok' user-one:clusterMonitor@some-name-mongos.custom-users-roles-sharded-14882 '' '' --quiet ++ grep -E -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=user-one:clusterMonitor@some-name-mongos.custom-users-roles-sharded-14882 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ local port=27017 ++ local mongo_bin=mongo +++ echo .svc.cluster.local +++ awk -F: '{print $2}' ++ suffix_port= ++ [[ -z '' ]] ++ suffix=.svc.cluster.local:27017 +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.k30PByCUDG ++++ mktemp +++ local LAST_ERR=/tmp/tmp.SbA3UQT8ai +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.k30PByCUDG +++ cat /tmp/tmp.SbA3UQT8ai +++ rm /tmp/tmp.k30PByCUDG /tmp/tmp.SbA3UQT8ai +++ return 0 ++ local client_container=psmdb-client-bb8b97679-ftcxh ++ kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-one:clusterMonitor@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8PvKTSNmo8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.pxzErPGIg4 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-one:clusterMonitor@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8PvKTSNmo8 ++ cat /tmp/tmp.pxzErPGIg4 ++ rm /tmp/tmp.8PvKTSNmo8 
/tmp/tmp.pxzErPGIg4 ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + generatedUserSecret=some-name-custom-user-secret ++ kubectl_bin get secret some-name-custom-user-secret -o 'jsonpath={.data.user-gen}' ++ base64 -d +++ mktemp ++ local LAST_OUT=/tmp/tmp.qr0uEKLKff +++ mktemp ++ local LAST_ERR=/tmp/tmp.UfXis1ASIs ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secret some-name-custom-user-secret -o 'jsonpath={.data.user-gen}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.qr0uEKLKff ++ cat /tmp/tmp.UfXis1ASIs ++ rm /tmp/tmp.qr0uEKLKff /tmp/tmp.UfXis1ASIs ++ return 0 + generatedPass=71nJPYY7kET94NSX ++ get_user_cmd '"user-gen"' ++ local 'user="user-gen"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-gen");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-gen");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare admin $'(function() {\n\tvar user = db.getUser("user-gen");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 user-gen + local database=admin + local $'command=(function() {\n\tvar user = db.getUser("user-gen");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local target=user-gen + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar user = db.getUser("user-gen");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 mongodb + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local $'command=use admin\\n (function() {\n\tvar user = db.getUser("user-gen");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + 
suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3DolYLsBzE +++ mktemp ++ local LAST_ERR=/tmp/tmp.xElKfLZlp6 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3DolYLsBzE ++ cat /tmp/tmp.xElKfLZlp6 ++ rm /tmp/tmp.3DolYLsBzE /tmp/tmp.xElKfLZlp6 ++ return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-gen");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.VqIHsn2Dxl ++ mktemp + local LAST_ERR=/tmp/tmp.yi7XVSyrWb + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-gen");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.VqIHsn2Dxl + cat /tmp/tmp.yi7XVSyrWb + rm /tmp/tmp.VqIHsn2Dxl /tmp/tmp.yi7XVSyrWb + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/user-gen.json /tmp/tmp.UfAOPg7iAD/user-gen + check_auth user-gen:71nJPYY7kET94NSX@some-name-mongos.custom-users-roles-sharded-14882 + local uri=user-gen:71nJPYY7kET94NSX@some-name-mongos.custom-users-roles-sharded-14882 ++ run_mongos 'db.runCommand({ ping: 1 }).ok' user-gen:71nJPYY7kET94NSX@some-name-mongos.custom-users-roles-sharded-14882 '' '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=user-gen:71nJPYY7kET94NSX@some-name-mongos.custom-users-roles-sharded-14882 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ local port=27017 ++ local mongo_bin=mongo ++ grep -E -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ echo .svc.cluster.local +++ awk -F: '{print $2}' ++ suffix_port= ++ [[ -z '' ]] ++ suffix=.svc.cluster.local:27017 +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.zzUJyfrtSF ++++ mktemp +++ local LAST_ERR=/tmp/tmp.a6zNNcVBFS +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.zzUJyfrtSF +++ cat /tmp/tmp.a6zNNcVBFS +++ rm /tmp/tmp.zzUJyfrtSF /tmp/tmp.a6zNNcVBFS +++ return 0 ++ local 
client_container=psmdb-client-bb8b97679-ftcxh ++ kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-gen:71nJPYY7kET94NSX@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zUOlJ7ecXM +++ mktemp ++ local LAST_ERR=/tmp/tmp.q69MOi2acE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-gen:71nJPYY7kET94NSX@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.zUOlJ7ecXM ++ cat /tmp/tmp.q69MOi2acE ++ rm /tmp/tmp.zUOlJ7ecXM /tmp/tmp.q69MOi2acE ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' ++ get_user_cmd '"user-external"' ++ local 'user="user-external"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-external");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-external");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare '$external' $'(function() {\n\tvar user = db.getUser("user-external");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 user-external + local 'database=$external' + local $'command=(function() {\n\tvar user = db.getUser("user-external");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local target=user-external + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use $external\\n (function() {\n\tvar user = db.getUser("user-external");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 mongodb + local $'command=use $external\\n (function() {\n\tvar user = db.getUser("user-external");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history 
file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bZczbkO4aL +++ mktemp ++ local LAST_ERR=/tmp/tmp.A3QECAsgo1 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bZczbkO4aL ++ cat /tmp/tmp.A3QECAsgo1 ++ rm /tmp/tmp.bZczbkO4aL /tmp/tmp.A3QECAsgo1 ++ return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use $external\\n (function() {\n\tvar user = db.getUser("user-external");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.PYHMuAPojE ++ mktemp + local LAST_ERR=/tmp/tmp.wciMGcjAJ4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use $external\\n (function() {\n\tvar user = db.getUser("user-external");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.PYHMuAPojE + cat /tmp/tmp.wciMGcjAJ4 + rm /tmp/tmp.PYHMuAPojE /tmp/tmp.wciMGcjAJ4 + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/user-external.json /tmp/tmp.UfAOPg7iAD/user-external + desc 'delete initial user from CR and create a new one' + set +o xtrace ----------------------------------------------------------------------------------- delete initial user from CR and create a new one ----------------------------------------------------------------------------------- + kubectl_bin patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"users":[\n\t\t\t{\n\t\t\t\t"name":"user-two",\n\t\t\t\t"db":"admin",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-two",\n\t\t\t\t\t"key": "userTwoPassKey"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{"db":"admin","name":"userAdminAnyDatabase"}, \n\t\t\t\t\t{"db":"admin","name":"clusterAdmin"}\n\t\t\t\t]\n\t\t\t}\n\t\t]}\n\t}' ++ mktemp + local LAST_OUT=/tmp/tmp.ojmodlOmjd ++ mktemp + local LAST_ERR=/tmp/tmp.giDbhwgeb9 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"users":[\n\t\t\t{\n\t\t\t\t"name":"user-two",\n\t\t\t\t"db":"admin",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-two",\n\t\t\t\t\t"key": "userTwoPassKey"\n\t\t\t\t},\n\t\t\t\t"roles": 
[\n\t\t\t\t\t{"db":"admin","name":"userAdminAnyDatabase"}, \n\t\t\t\t\t{"db":"admin","name":"clusterAdmin"}\n\t\t\t\t]\n\t\t\t}\n\t\t]}\n\t}' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ojmodlOmjd perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.giDbhwgeb9 + rm /tmp/tmp.ojmodlOmjd /tmp/tmp.giDbhwgeb9 + return 0 + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BCkKy6STG9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.lsSP6xj3X2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.BCkKy6STG9 ++ cat /tmp/tmp.lsSP6xj3X2 ++ rm /tmp/tmp.BCkKy6STG9 /tmp/tmp.lsSP6xj3X2 ++ return 0 + [[ '' == true ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.k03M6XCHVW +++ mktemp ++ local LAST_ERR=/tmp/tmp.hH065LYnGZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.k03M6XCHVW ++ cat /tmp/tmp.hH065LYnGZ ++ rm /tmp/tmp.k03M6XCHVW /tmp/tmp.hH065LYnGZ ++ return 0 + [[ '' == true ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EAKxw3RrFN +++ mktemp ++ local LAST_ERR=/tmp/tmp.AIClMtkmga ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.EAKxw3RrFN ++ cat /tmp/tmp.AIClMtkmga ++ rm /tmp/tmp.EAKxw3RrFN /tmp/tmp.AIClMtkmga ++ return 0 + [[ '' == true ]] + sleep 10 + [[ true == true ]] + set +x Waiting for cluster readyness ++ get_user_cmd '"user-two"' ++ local 'user="user-two"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare admin $'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = 
user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 user-two + local database=admin + local $'command=(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local target=user-two + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 mongodb + local $'command=use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' ++ echo .svc.cluster.local + sed '/"userId"/d' ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1W5TXk4w6y +++ mktemp ++ local LAST_ERR=/tmp/tmp.3noTTqZt26 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.1W5TXk4w6y ++ cat /tmp/tmp.3noTTqZt26 ++ rm /tmp/tmp.1W5TXk4w6y /tmp/tmp.3noTTqZt26 ++ return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.1nEacinfhQ ++ mktemp + local LAST_ERR=/tmp/tmp.xJWGf1i6rE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 
0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.1nEacinfhQ + cat /tmp/tmp.xJWGf1i6rE + rm /tmp/tmp.1nEacinfhQ /tmp/tmp.xJWGf1i6rE + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/user-two.json /tmp/tmp.UfAOPg7iAD/user-two + userTwo=user-two ++ getSecretData user-two userTwoPassKey ++ local secretName=user-two ++ local dataKey=userTwoPassKey +++ kubectl get secrets/user-two '--template={{.data.userTwoPassKey}}' +++ base64 -d ++ local data=clusterMonitor ++ echo clusterMonitor + userTwoPass=clusterMonitor + check_auth user-two:clusterMonitor@some-name-mongos.custom-users-roles-sharded-14882 + local uri=user-two:clusterMonitor@some-name-mongos.custom-users-roles-sharded-14882 ++ run_mongos 'db.runCommand({ ping: 1 }).ok' user-two:clusterMonitor@some-name-mongos.custom-users-roles-sharded-14882 '' '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=user-two:clusterMonitor@some-name-mongos.custom-users-roles-sharded-14882 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ local port=27017 ++ local mongo_bin=mongo ++ grep -E -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ echo .svc.cluster.local +++ awk -F: '{print $2}' ++ suffix_port= ++ [[ -z '' ]] ++ suffix=.svc.cluster.local:27017 +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.69YGlSXEMV ++++ mktemp +++ local LAST_ERR=/tmp/tmp.uft2d8FgSO +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.69YGlSXEMV +++ cat /tmp/tmp.uft2d8FgSO +++ rm /tmp/tmp.69YGlSXEMV /tmp/tmp.uft2d8FgSO +++ return 0 ++ local client_container=psmdb-client-bb8b97679-ftcxh ++ kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-two:clusterMonitor@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MH0IE6dn9y +++ mktemp ++ local LAST_ERR=/tmp/tmp.sZHBYRA1wr ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-two:clusterMonitor@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.MH0IE6dn9y ++ cat /tmp/tmp.sZHBYRA1wr ++ rm /tmp/tmp.MH0IE6dn9y /tmp/tmp.sZHBYRA1wr ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_auth user-one:clusterMonitor@some-name-mongos.custom-users-roles-sharded-14882 + local 
uri=user-one:clusterMonitor@some-name-mongos.custom-users-roles-sharded-14882 ++ run_mongos 'db.runCommand({ ping: 1 }).ok' user-one:clusterMonitor@some-name-mongos.custom-users-roles-sharded-14882 '' '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=user-one:clusterMonitor@some-name-mongos.custom-users-roles-sharded-14882 ++ grep -E -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ local port=27017 ++ local mongo_bin=mongo +++ echo .svc.cluster.local +++ awk -F: '{print $2}' ++ suffix_port= ++ [[ -z '' ]] ++ suffix=.svc.cluster.local:27017 +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.9BjXb8G6DY ++++ mktemp +++ local LAST_ERR=/tmp/tmp.0qvkFz9nV5 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.9BjXb8G6DY +++ cat /tmp/tmp.0qvkFz9nV5 +++ rm /tmp/tmp.9BjXb8G6DY /tmp/tmp.0qvkFz9nV5 +++ return 0 ++ local client_container=psmdb-client-bb8b97679-ftcxh ++ kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-one:clusterMonitor@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hBDru29g5t +++ mktemp ++ local LAST_ERR=/tmp/tmp.MO2IdXU6HE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-one:clusterMonitor@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hBDru29g5t ++ cat /tmp/tmp.MO2IdXU6HE ++ rm /tmp/tmp.hBDru29g5t /tmp/tmp.MO2IdXU6HE ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + desc 'check password change' + set +o xtrace ----------------------------------------------------------------------------------- check password change ----------------------------------------------------------------------------------- + userTwoNewPass=new-user-two-password ++ echo -n new-user-two-password ++ base64 + patch_secret user-two userTwoPassKey bmV3LXVzZXItdHdvLXBhc3N3b3Jk + local secret=user-two + local key=userTwoPassKey + local value=bmV3LXVzZXItdHdvLXBhc3N3b3Jk + kubectl patch secret user-two '-p={"data":{"userTwoPassKey": "bmV3LXVzZXItdHdvLXBhc3N3b3Jk"}}' secret/user-two patched + sleep 20 + check_auth user-two:new-user-two-password@some-name-mongos.custom-users-roles-sharded-14882 + local uri=user-two:new-user-two-password@some-name-mongos.custom-users-roles-sharded-14882 ++ grep -E -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ run_mongos 'db.runCommand({ ping: 1 }).ok' 
user-two:new-user-two-password@some-name-mongos.custom-users-roles-sharded-14882 '' '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=user-two:new-user-two-password@some-name-mongos.custom-users-roles-sharded-14882 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ local port=27017 ++ local mongo_bin=mongo +++ echo .svc.cluster.local +++ awk -F: '{print $2}' ++ suffix_port= ++ [[ -z '' ]] ++ suffix=.svc.cluster.local:27017 +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.TXaZmOp2dx ++++ mktemp +++ local LAST_ERR=/tmp/tmp.JP0EqMMfk8 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.TXaZmOp2dx +++ cat /tmp/tmp.JP0EqMMfk8 +++ rm /tmp/tmp.TXaZmOp2dx /tmp/tmp.JP0EqMMfk8 +++ return 0 ++ local client_container=psmdb-client-bb8b97679-ftcxh ++ kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-two:new-user-two-password@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SrCC5jZUuF +++ mktemp ++ local LAST_ERR=/tmp/tmp.97NwOlBU58 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-two:new-user-two-password@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.SrCC5jZUuF ++ cat /tmp/tmp.97NwOlBU58 ++ rm /tmp/tmp.SrCC5jZUuF /tmp/tmp.97NwOlBU58 ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + desc 'check user roles update from CR' + set +o xtrace ----------------------------------------------------------------------------------- check user roles update from CR ----------------------------------------------------------------------------------- + kubectl_bin patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"users":[\n\t\t\t{\n\t\t\t\t"name":"user-two",\n\t\t\t\t"db":"admin",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-two",\n\t\t\t\t\t"key": "userTwoPassKey"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{"db":"admin","name":"clusterAdmin"}\n\t\t\t\t]\n\t\t\t}\n\t\t]}\n\t}' ++ mktemp + local LAST_OUT=/tmp/tmp.rEMzTEzsUm ++ mktemp + local LAST_ERR=/tmp/tmp.EUnIaMiOud + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"users":[\n\t\t\t{\n\t\t\t\t"name":"user-two",\n\t\t\t\t"db":"admin",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-two",\n\t\t\t\t\t"key": "userTwoPassKey"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{"db":"admin","name":"clusterAdmin"}\n\t\t\t\t]\n\t\t\t}\n\t\t]}\n\t}' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.rEMzTEzsUm perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.EUnIaMiOud + rm /tmp/tmp.rEMzTEzsUm /tmp/tmp.EUnIaMiOud 
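The patch traced above narrows user-two from two roles (userAdminAnyDatabase plus clusterAdmin) down to clusterAdmin alone; once the operator reconciles the change, the compare step that follows checks db.getUser("user-two") against user-two-update-roles.json. For a hand-run spot check, a minimal sketch reusing the client pod, userAdmin credentials, and mongos address already present in this log (not a command the test itself issues) would be:

# read the role list back directly through the same client pod the test uses
kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c 'printf "printjson(db.getUser(\"user-two\").roles)\n" | mongo "mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin" --quiet'
# if reconciliation has finished, this should print a single
# { "role" : "clusterAdmin", "db" : "admin" } entry
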
+ return 0 + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.b5vM27keM9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.SxBmx8MVya ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.b5vM27keM9 ++ cat /tmp/tmp.SxBmx8MVya ++ rm /tmp/tmp.b5vM27keM9 /tmp/tmp.SxBmx8MVya ++ return 0 + [[ '' == true ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tgSJ4pcYcX +++ mktemp ++ local LAST_ERR=/tmp/tmp.3rT1x4ok18 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.tgSJ4pcYcX ++ cat /tmp/tmp.3rT1x4ok18 ++ rm /tmp/tmp.tgSJ4pcYcX /tmp/tmp.3rT1x4ok18 ++ return 0 + [[ '' == true ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.heWrGBJJ7v +++ mktemp ++ local LAST_ERR=/tmp/tmp.XNW7Cn2J6o ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.heWrGBJJ7v ++ cat /tmp/tmp.XNW7Cn2J6o ++ rm /tmp/tmp.heWrGBJJ7v /tmp/tmp.XNW7Cn2J6o ++ return 0 + [[ '' == true ]] + sleep 10 + [[ true == true ]] + set +x Waiting for cluster readyness ++ get_user_cmd '"user-two"' ++ local 'user="user-two"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare admin $'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 user-two-update-roles + local database=admin + local $'command=(function() {\n\tvar user = 
db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local target=user-two-update-roles + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 mongodb + local $'command=use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + sed '/"userId"/d' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8GvhiP3l7o +++ mktemp ++ local LAST_ERR=/tmp/tmp.pkp0pSqUWY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8GvhiP3l7o ++ cat /tmp/tmp.pkp0pSqUWY ++ rm /tmp/tmp.8GvhiP3l7o /tmp/tmp.pkp0pSqUWY ++ return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.Cf1DWds9F9 ++ mktemp + local LAST_ERR=/tmp/tmp.WKFNVCIudI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Cf1DWds9F9 + cat /tmp/tmp.WKFNVCIudI + rm /tmp/tmp.Cf1DWds9F9 
/tmp/tmp.WKFNVCIudI + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/user-two-update-roles.json /tmp/tmp.UfAOPg7iAD/user-two-update-roles + desc 'check user roles update from DB' + set +o xtrace ----------------------------------------------------------------------------------- check user roles update from DB ----------------------------------------------------------------------------------- + run_mongos 'use admin\n db.updateUser("user-two", { roles : [{ role : "userAdminAnyDatabase", db: "admin"}]})' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local 'command=use admin\n db.updateUser("user-two", { roles : [{ role : "userAdminAnyDatabase", db: "admin"}]})' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MJMJee4Eg9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.qvGximpuEV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.MJMJee4Eg9 ++ cat /tmp/tmp.qvGximpuEV ++ rm /tmp/tmp.MJMJee4Eg9 /tmp/tmp.qvGximpuEV ++ return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c 'printf '\''use admin\n db.updateUser("user-two", { roles : [{ role : "userAdminAnyDatabase", db: "admin"}]})\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.znstc62rIb ++ mktemp + local LAST_ERR=/tmp/tmp.hLmejoXQo9 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c 'printf '\''use admin\n db.updateUser("user-two", { roles : [{ role : "userAdminAnyDatabase", db: "admin"}]})\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.znstc62rIb Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("6a830a6f-7d1d-470e-99dd-454e57ae3d7f") } Percona Server for MongoDB server version: v8.0.20-8 WARNING: shell and server versions do not match switched to db admin bye + cat /tmp/tmp.hLmejoXQo9 + rm /tmp/tmp.znstc62rIb /tmp/tmp.hLmejoXQo9 + return 0 + sleep 15 ++ get_user_cmd '"user-two"' ++ local 'user="user-two"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 
1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare admin $'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 user-two-update-roles + local database=admin + local $'command=(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local target=user-two-update-roles + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 mongodb + local $'command=use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local mongo_bin=mongo + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.te0IbZKoXg +++ mktemp ++ local LAST_ERR=/tmp/tmp.lsaSPza6qf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.te0IbZKoXg ++ cat /tmp/tmp.lsaSPza6qf ++ rm /tmp/tmp.te0IbZKoXg /tmp/tmp.lsaSPza6qf ++ return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.bVztDAiBtl ++ mktemp + local LAST_ERR=/tmp/tmp.7Etimcnixn + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n 
(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.bVztDAiBtl + cat /tmp/tmp.7Etimcnixn + rm /tmp/tmp.bVztDAiBtl /tmp/tmp.7Etimcnixn + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/user-two-update-roles.json /tmp/tmp.UfAOPg7iAD/user-two-update-roles + desc 'check user recreated after deleted from DB' + set +o xtrace ----------------------------------------------------------------------------------- check user recreated after deleted from DB ----------------------------------------------------------------------------------- + run_mongos 'use admin\n db.dropUser("user-two")' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local 'command=use admin\n db.dropUser("user-two")' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fVwuhznif6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.PsEcTk6nTf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.fVwuhznif6 ++ cat /tmp/tmp.PsEcTk6nTf ++ rm /tmp/tmp.fVwuhznif6 /tmp/tmp.PsEcTk6nTf ++ return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c 'printf '\''use admin\n db.dropUser("user-two")\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.d0pT8kimw4 ++ mktemp + local LAST_ERR=/tmp/tmp.iIenXlMwTw + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c 'printf '\''use admin\n db.dropUser("user-two")\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.d0pT8kimw4 Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("afce57bf-986c-4060-9537-ecc6c738babc") } Percona Server for MongoDB server version: v8.0.20-8 WARNING: shell and server versions do not match switched to db admin true bye + cat /tmp/tmp.iIenXlMwTw + rm /tmp/tmp.d0pT8kimw4 /tmp/tmp.iIenXlMwTw + return 0 + sleep 15 ++ get_user_cmd '"user-two"' ++ local 'user="user-two"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return 
-1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare admin $'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 user-two-update-roles + local database=admin + local $'command=(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local target=user-two-update-roles + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 mongodb + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local $'command=use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + sed '/"userId"/d' ++ awk -F: '{print $2}' ++ echo .svc.cluster.local + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3LjPsqCEeS +++ mktemp ++ local LAST_ERR=/tmp/tmp.BgDrDooqYJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3LjPsqCEeS ++ cat /tmp/tmp.BgDrDooqYJ ++ rm /tmp/tmp.3LjPsqCEeS /tmp/tmp.BgDrDooqYJ ++ return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo 
mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.wVlv9ppALE ++ mktemp + local LAST_ERR=/tmp/tmp.MifEYf0xhx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.wVlv9ppALE + cat /tmp/tmp.MifEYf0xhx + rm /tmp/tmp.wVlv9ppALE /tmp/tmp.MifEYf0xhx + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/user-two-update-roles.json /tmp/tmp.UfAOPg7iAD/user-two-update-roles + desc 'check new user created after updated user name via CR' + set +o xtrace ----------------------------------------------------------------------------------- check new user created after updated user name via CR ----------------------------------------------------------------------------------- + kubectl_bin patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"users":[\n\t\t\t{\n\t\t\t\t"name":"user-three",\n\t\t\t\t"db":"admin",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-two",\n\t\t\t\t\t"key": "userTwoPassKey"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{"db":"admin","name":"clusterAdmin"}\n\t\t\t\t]\n\t\t\t}\n\t\t]}\n\t}' ++ mktemp + local LAST_OUT=/tmp/tmp.jH1HulvAug ++ mktemp + local LAST_ERR=/tmp/tmp.1y02vhGmHY + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"users":[\n\t\t\t{\n\t\t\t\t"name":"user-three",\n\t\t\t\t"db":"admin",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-two",\n\t\t\t\t\t"key": "userTwoPassKey"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{"db":"admin","name":"clusterAdmin"}\n\t\t\t\t]\n\t\t\t}\n\t\t]}\n\t}' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.jH1HulvAug perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.1y02vhGmHY + rm /tmp/tmp.jH1HulvAug /tmp/tmp.1y02vhGmHY + return 0 + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lq5Wz3sU4B +++ mktemp ++ local LAST_ERR=/tmp/tmp.ImNfOT4OyQ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break 
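The patch a few lines up renames the managed user by changing spec.users[0].name from user-two to user-three, while keeping the same user-two/userTwoPassKey password secret and the single clusterAdmin role. Once the cluster settles, the trace compares user-three against user-three-admin-db.json and then re-compares user-two, which indicates the rename is expected to create the new user while leaving the previously created one in place. A minimal sketch for listing both by hand (a hypothetical spot check, not part of the test; it reuses the pod and credentials from this log and only covers users defined in the admin database):

# enumerate the user names currently defined in admin via the usersInfo command
kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c 'printf "printjson(db.runCommand({ usersInfo: 1 }).users.map(u => u.user))\n" | mongo "mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin" --quiet'
# both "user-two" and "user-three" should appear once the rename patch is reconciled
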
++ cat /tmp/tmp.lq5Wz3sU4B ++ cat /tmp/tmp.ImNfOT4OyQ ++ rm /tmp/tmp.lq5Wz3sU4B /tmp/tmp.ImNfOT4OyQ ++ return 0 + [[ '' == true ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bDMwTYWpyJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.WWN60FZSa3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bDMwTYWpyJ ++ cat /tmp/tmp.WWN60FZSa3 ++ rm /tmp/tmp.bDMwTYWpyJ /tmp/tmp.WWN60FZSa3 ++ return 0 + [[ '' == true ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.08O9kfyY60 +++ mktemp ++ local LAST_ERR=/tmp/tmp.iK0Uu9LrxM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.08O9kfyY60 ++ cat /tmp/tmp.iK0Uu9LrxM ++ rm /tmp/tmp.08O9kfyY60 /tmp/tmp.iK0Uu9LrxM ++ return 0 + [[ '' == true ]] + sleep 10 + [[ true == true ]] + set +x Waiting for cluster readyness ++ get_user_cmd '"user-three"' ++ local 'user="user-three"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare admin $'(function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 user-three-admin-db + local database=admin + local $'command=(function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local target=user-three-admin-db + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 mongodb + local $'command=use admin\\n (function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = 
roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fnSy1f9cKH +++ mktemp ++ local LAST_ERR=/tmp/tmp.PywEiNOtqW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.fnSy1f9cKH ++ cat /tmp/tmp.PywEiNOtqW ++ rm /tmp/tmp.fnSy1f9cKH /tmp/tmp.PywEiNOtqW ++ return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.ooIIsA1eRG ++ mktemp + local LAST_ERR=/tmp/tmp.kFqJUt2ZuI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ooIIsA1eRG + cat /tmp/tmp.kFqJUt2ZuI + rm /tmp/tmp.ooIIsA1eRG /tmp/tmp.kFqJUt2ZuI + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/user-three-admin-db.json /tmp/tmp.UfAOPg7iAD/user-three-admin-db ++ get_user_cmd '"user-two"' ++ local 'user="user-two"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare admin $'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = 
roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 user-two-update-roles + local database=admin + local $'command=(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local target=user-two-update-roles + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 mongodb + local $'command=use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J53LsuETZ0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.rtGre4kHTL ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.J53LsuETZ0 ++ cat /tmp/tmp.rtGre4kHTL ++ rm /tmp/tmp.J53LsuETZ0 /tmp/tmp.rtGre4kHTL ++ return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.0hsJue0lrp ++ mktemp + local LAST_ERR=/tmp/tmp.RhHIpJG1ob + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo 
mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.0hsJue0lrp + cat /tmp/tmp.RhHIpJG1ob + rm /tmp/tmp.0hsJue0lrp /tmp/tmp.RhHIpJG1ob + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/user-two-update-roles.json /tmp/tmp.UfAOPg7iAD/user-two-update-roles + check_auth user-two:new-user-two-password@some-name-mongos.custom-users-roles-sharded-14882 + local uri=user-two:new-user-two-password@some-name-mongos.custom-users-roles-sharded-14882 ++ run_mongos 'db.runCommand({ ping: 1 }).ok' user-two:new-user-two-password@some-name-mongos.custom-users-roles-sharded-14882 '' '' --quiet ++ grep -E -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=user-two:new-user-two-password@some-name-mongos.custom-users-roles-sharded-14882 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ local port=27017 ++ local mongo_bin=mongo +++ echo .svc.cluster.local +++ awk -F: '{print $2}' ++ suffix_port= ++ [[ -z '' ]] ++ suffix=.svc.cluster.local:27017 +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.B7ihAMJt4g ++++ mktemp +++ local LAST_ERR=/tmp/tmp.lXq2fazO1c +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.B7ihAMJt4g +++ cat /tmp/tmp.lXq2fazO1c +++ rm /tmp/tmp.B7ihAMJt4g /tmp/tmp.lXq2fazO1c +++ return 0 ++ local client_container=psmdb-client-bb8b97679-ftcxh ++ kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-two:new-user-two-password@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3t3ElZt9Tl +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZoQ4vFFSHl ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-two:new-user-two-password@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3t3ElZt9Tl ++ cat /tmp/tmp.ZoQ4vFFSHl ++ rm /tmp/tmp.3t3ElZt9Tl /tmp/tmp.ZoQ4vFFSHl ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_auth user-three:new-user-two-password@some-name-mongos.custom-users-roles-sharded-14882 + local uri=user-three:new-user-two-password@some-name-mongos.custom-users-roles-sharded-14882 ++ run_mongos 'db.runCommand({ ping: 1 }).ok' user-three:new-user-two-password@some-name-mongos.custom-users-roles-sharded-14882 '' '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local 
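# ---------------------------------------------------------------------------
# Editor's note: check_auth proves a credential works end-to-end by running a
# ping through mongos as that user and asserting the result is 1. A minimal
# sketch of the same check, assuming (as in this test) a client pod labeled
# name=psmdb-client; the variable names are illustrative:
client=$(kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}')
uri="mongodb://user-two:new-user-two-password@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin"
kubectl exec "$client" -- bash -c "printf 'db.runCommand({ ping: 1 }).ok\n' | mongo '$uri' --quiet"
# A quiet-mode result of 1 confirms the server now accepts the password taken
# from the patched secret.
# ---------------------------------------------------------------------------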
uri=user-three:new-user-two-password@some-name-mongos.custom-users-roles-sharded-14882 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ local port=27017 ++ local mongo_bin=mongo ++ grep -E -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ echo .svc.cluster.local +++ awk -F: '{print $2}' ++ suffix_port= ++ [[ -z '' ]] ++ suffix=.svc.cluster.local:27017 +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.l7W43GyrFq ++++ mktemp +++ local LAST_ERR=/tmp/tmp.R9zV4w7UYZ +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.l7W43GyrFq +++ cat /tmp/tmp.R9zV4w7UYZ +++ rm /tmp/tmp.l7W43GyrFq /tmp/tmp.R9zV4w7UYZ +++ return 0 ++ local client_container=psmdb-client-bb8b97679-ftcxh ++ kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-three:new-user-two-password@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AhgSbiGg5B +++ mktemp ++ local LAST_ERR=/tmp/tmp.ToXw7FKMKy ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-three:new-user-two-password@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.AhgSbiGg5B ++ cat /tmp/tmp.ToXw7FKMKy ++ rm /tmp/tmp.AhgSbiGg5B /tmp/tmp.ToXw7FKMKy ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + desc 'check new user created after updated user db via CR' + set +o xtrace ----------------------------------------------------------------------------------- check new user created after updated user db via CR ----------------------------------------------------------------------------------- + kubectl_bin patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"users":[\n\t\t\t{\n\t\t\t\t"name":"user-three",\n\t\t\t\t"db":"newDb",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-two",\n\t\t\t\t\t"key": "userTwoPassKey"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{"db":"admin","name":"clusterAdmin"}\n\t\t\t\t]\n\t\t\t}\n\t\t]}\n\t}' ++ mktemp + local LAST_OUT=/tmp/tmp.FHmXPvDdNG ++ mktemp + local LAST_ERR=/tmp/tmp.zsjQwBNHbz + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"users":[\n\t\t\t{\n\t\t\t\t"name":"user-three",\n\t\t\t\t"db":"newDb",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-two",\n\t\t\t\t\t"key": "userTwoPassKey"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{"db":"admin","name":"clusterAdmin"}\n\t\t\t\t]\n\t\t\t}\n\t\t]}\n\t}' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.FHmXPvDdNG 
perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.zsjQwBNHbz + rm /tmp/tmp.FHmXPvDdNG /tmp/tmp.zsjQwBNHbz + return 0 + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fN9ueiYeba +++ mktemp ++ local LAST_ERR=/tmp/tmp.a9p0jGVfi6 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.fN9ueiYeba ++ cat /tmp/tmp.a9p0jGVfi6 ++ rm /tmp/tmp.fN9ueiYeba /tmp/tmp.a9p0jGVfi6 ++ return 0 + [[ '' == true ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9HLf9iM10Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.IerJd9c9n5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.9HLf9iM10Y ++ cat /tmp/tmp.IerJd9c9n5 ++ rm /tmp/tmp.9HLf9iM10Y /tmp/tmp.IerJd9c9n5 ++ return 0 + [[ '' == true ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fu8gnRwvc5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.mxCggKww43 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.fu8gnRwvc5 ++ cat /tmp/tmp.mxCggKww43 ++ rm /tmp/tmp.fu8gnRwvc5 /tmp/tmp.mxCggKww43 ++ return 0 + [[ '' == true ]] + sleep 10 + [[ true == true ]] + set +x Waiting for cluster readyness ++ get_user_cmd '"user-three"' ++ local 'user="user-three"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare newDb $'(function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' 
userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 user-three-newDb-db + local database=newDb + local $'command=(function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local target=user-three-newDb-db + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use newDb\\n (function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 mongodb + local $'command=use newDb\\n (function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + sed '/"userId"/d' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BdmMKWCHGi +++ mktemp ++ local LAST_ERR=/tmp/tmp.eEgDD6jXwU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.BdmMKWCHGi ++ cat /tmp/tmp.eEgDD6jXwU ++ rm /tmp/tmp.BdmMKWCHGi /tmp/tmp.eEgDD6jXwU ++ return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use newDb\\n (function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.Ps5EYXa5LW ++ mktemp + local LAST_ERR=/tmp/tmp.xj8BEw44BN + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use newDb\\n (function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo 
mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Ps5EYXa5LW + cat /tmp/tmp.xj8BEw44BN + rm /tmp/tmp.Ps5EYXa5LW /tmp/tmp.xj8BEw44BN + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/user-three-newDb-db.json /tmp/tmp.UfAOPg7iAD/user-three-newDb-db ++ get_user_cmd '"user-three"' ++ local 'user="user-three"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare admin $'(function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 user-three-admin-db + local database=admin + local $'command=(function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local target=user-three-admin-db + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 mongodb + local $'command=use admin\\n (function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.clCgNlcT8u +++ mktemp ++ local LAST_ERR=/tmp/tmp.03MXwmjXmc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 
'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.clCgNlcT8u ++ cat /tmp/tmp.03MXwmjXmc ++ rm /tmp/tmp.clCgNlcT8u /tmp/tmp.03MXwmjXmc ++ return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.jICjuiompO ++ mktemp + local LAST_ERR=/tmp/tmp.7Ij3CmXswa + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.jICjuiompO + cat /tmp/tmp.7Ij3CmXswa + rm /tmp/tmp.jICjuiompO /tmp/tmp.7Ij3CmXswa + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/user-three-admin-db.json /tmp/tmp.UfAOPg7iAD/user-three-admin-db + desc 'check new user created with default db and secret password key' + set +o xtrace ----------------------------------------------------------------------------------- check new user created with default db and secret password key ----------------------------------------------------------------------------------- + kubectl_bin patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"users":[\n\t\t\t{\n\t\t\t\t"name":"user-four",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-two"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{"db":"admin","name":"clusterAdmin"}\n\t\t\t\t]\n\t\t\t}\n\t\t]}\n\t}' ++ mktemp + local LAST_OUT=/tmp/tmp.vf7ezSyadu ++ mktemp + local LAST_ERR=/tmp/tmp.ljPynPr1nV + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"users":[\n\t\t\t{\n\t\t\t\t"name":"user-four",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-two"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{"db":"admin","name":"clusterAdmin"}\n\t\t\t\t]\n\t\t\t}\n\t\t]}\n\t}' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.vf7ezSyadu perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.ljPynPr1nV + rm /tmp/tmp.vf7ezSyadu /tmp/tmp.ljPynPr1nV + return 0 + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for 
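# ---------------------------------------------------------------------------
# Editor's note: the two patches above exercise how the operator identifies a
# managed user by its (name, db) pair. Changing "db" from admin to newDb
# creates a separate user-three@newDb while user-three@admin stays in place,
# which is why the trace compares both user-three-newDb-db and
# user-three-admin-db against golden files. The user-four entry then omits
# both "db" and "passwordSecretRef.key"; per the desc banner the operator
# falls back to defaults for each (the admin database, as the compare against
# admin shows, and a default key in the referenced secret - the exact key
# name is not visible in this excerpt). The minimal form, verbatim from the
# patch above:
kubectl patch psmdb some-name --type=merge --patch '{
  "spec": {"users": [{
    "name": "user-four",
    "passwordSecretRef": {"name": "user-two"},
    "roles": [{"db": "admin", "name": "clusterAdmin"}]
  }]}
}'
# ---------------------------------------------------------------------------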
pod/some-name-rs0-1 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sbSxuS3Gd4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.BIJEFn5ztB ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.sbSxuS3Gd4 ++ cat /tmp/tmp.BIJEFn5ztB ++ rm /tmp/tmp.sbSxuS3Gd4 /tmp/tmp.BIJEFn5ztB ++ return 0 + [[ '' == true ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AMTroUpSTZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.1ekV4AhQRt ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.AMTroUpSTZ ++ cat /tmp/tmp.1ekV4AhQRt ++ rm /tmp/tmp.AMTroUpSTZ /tmp/tmp.1ekV4AhQRt ++ return 0 + [[ '' == true ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Y6dQWhN9vp +++ mktemp ++ local LAST_ERR=/tmp/tmp.6ekhQ7WwSA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Y6dQWhN9vp ++ cat /tmp/tmp.6ekhQ7WwSA ++ rm /tmp/tmp.Y6dQWhN9vp /tmp/tmp.6ekhQ7WwSA ++ return 0 + [[ '' == true ]] + sleep 10 + [[ true == true ]] + set +x Waiting for cluster readyness ++ get_user_cmd '"user-four"' ++ local 'user="user-four"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-four");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-four");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare admin $'(function() {\n\tvar user = db.getUser("user-four");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 user-four + local database=admin + local $'command=(function() {\n\tvar user = db.getUser("user-four");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local target=user-four + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar user = db.getUser("user-four");\n\tvar roles = 
user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 mongodb + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local $'command=use admin\\n (function() {\n\tvar user = db.getUser("user-four");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lKQV6lWh02 +++ mktemp ++ local LAST_ERR=/tmp/tmp.bVmGROjdKt ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.lKQV6lWh02 ++ cat /tmp/tmp.bVmGROjdKt ++ rm /tmp/tmp.lKQV6lWh02 /tmp/tmp.bVmGROjdKt ++ return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-four");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.1ZzDXjVbMX ++ mktemp + local LAST_ERR=/tmp/tmp.1heiMZllLZ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-four");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.1ZzDXjVbMX + cat /tmp/tmp.1heiMZllLZ + rm /tmp/tmp.1ZzDXjVbMX /tmp/tmp.1heiMZllLZ + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/user-four.json /tmp/tmp.UfAOPg7iAD/user-four + desc 'check user role on cluster initialization' + set +o xtrace ----------------------------------------------------------------------------------- check user role on cluster initialization ----------------------------------------------------------------------------------- ++ get_role_cmd '"role-one"' ++ local 'role="role-one"' ++ 
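# ---------------------------------------------------------------------------
# Editor's note: every user/role assertion in this test follows one pattern:
# fetch the object in the mongo shell, sort its "roles" array so the output
# is deterministic, printjson() it, scrub volatile fields (userId, ObjectId,
# the numbered namespace suffix) with sed, and diff against a golden JSON
# file under e2e-tests/custom-users-roles-sharded/compare/. The shell snippet
# being piped in is, reformatted for readability:
#
#   (function() {
#       var role = db.getRole("role-one",
#           {showPrivileges: true, showAuthenticationRestrictions: true});
#       var roles = role.roles;
#       roles.sort((a, b) => {
#           if (a.role < b.role) return -1;
#           if (a.role > b.role) return 1;
#           return 0;
#       });
#       role.roles = roles;
#       printjson(role);
#   })();
#
# Sorting before printing is what keeps the diff stable across servers that
# may return granted roles in any order.
# ---------------------------------------------------------------------------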
cmd=$'(function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' ++ echo $'(function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + compare admin $'(function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 role-one + local database=admin + local $'command=(function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local target=role-one + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 mongodb + local $'command=use admin\\n (function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.y8nioWkHeS +++ mktemp ++ local LAST_ERR=/tmp/tmp.MpyUKcIhy1 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.y8nioWkHeS ++ cat /tmp/tmp.MpyUKcIhy1 ++ rm /tmp/tmp.y8nioWkHeS /tmp/tmp.MpyUKcIhy1 ++ 
return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.jnOtHpiG4s ++ mktemp + local LAST_ERR=/tmp/tmp.2j0jPPaqXs + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.jnOtHpiG4s + cat /tmp/tmp.2j0jPPaqXs + rm /tmp/tmp.jnOtHpiG4s /tmp/tmp.2j0jPPaqXs + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/role-one.json /tmp/tmp.UfAOPg7iAD/role-one + desc 'check role recreated after deleted from DB' + set +o xtrace ----------------------------------------------------------------------------------- check role recreated after deleted from DB ----------------------------------------------------------------------------------- + run_mongos 'use admin\n db.dropRole("role-one")' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local 'command=use admin\n db.dropRole("role-one")' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6Lind2Oj0R +++ mktemp ++ local LAST_ERR=/tmp/tmp.lXIzPLTqRu ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.6Lind2Oj0R ++ cat /tmp/tmp.lXIzPLTqRu ++ rm /tmp/tmp.6Lind2Oj0R /tmp/tmp.lXIzPLTqRu ++ return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c 'printf '\''use admin\n db.dropRole("role-one")\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.giOqyRFHGw ++ mktemp + local LAST_ERR=/tmp/tmp.hIAf34i2js + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c 'printf '\''use admin\n db.dropRole("role-one")\n'\'' | mongo 
mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.giOqyRFHGw Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("c095f55b-4efb-45f5-8b6e-30fd04bcb51b") } Percona Server for MongoDB server version: v8.0.20-8 WARNING: shell and server versions do not match switched to db admin true bye + cat /tmp/tmp.hIAf34i2js + rm /tmp/tmp.giOqyRFHGw /tmp/tmp.hIAf34i2js + return 0 + sleep 15 ++ get_role_cmd '"role-one"' ++ local 'role="role-one"' ++ cmd=$'(function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' ++ echo $'(function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + compare admin $'(function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 role-one + local database=admin + local $'command=(function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local target=role-one + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 mongodb + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local $'command=use admin\\n (function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local 
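# ---------------------------------------------------------------------------
# Editor's note: the drop-and-recreate check above is a reconciliation test:
# the role is removed directly in MongoDB, the script sleeps 15s to allow an
# operator reconcile pass, and the same golden-file comparison is repeated to
# prove the operator restored the role declared in spec.roles. A sketch,
# reusing the client-pod lookup shown earlier (variable names illustrative):
client=$(kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}')
admin_uri="mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin"
kubectl exec "$client" -- bash -c "printf 'use admin\ndb.dropRole(\"role-one\")\n' | mongo \"$admin_uri\""
sleep 15   # 15s is the test's own allowance for a reconcile cycle
# ...then re-run the getRole comparison; an empty diff means the role is back.
# ---------------------------------------------------------------------------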
port=27017 + local mongo_bin=mongo + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.K3Sde4zWAN +++ mktemp ++ local LAST_ERR=/tmp/tmp.q278NjSWwW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.K3Sde4zWAN ++ cat /tmp/tmp.q278NjSWwW ++ rm /tmp/tmp.K3Sde4zWAN /tmp/tmp.q278NjSWwW ++ return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.mBu3iRftyc ++ mktemp + local LAST_ERR=/tmp/tmp.8jW6nH0Yic + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.mBu3iRftyc + cat /tmp/tmp.8jW6nH0Yic + rm /tmp/tmp.mBu3iRftyc /tmp/tmp.8jW6nH0Yic + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/role-one.json /tmp/tmp.UfAOPg7iAD/role-one + desc 'delete initial role from CR and create a new one' + set +o xtrace ----------------------------------------------------------------------------------- delete initial role from CR and create a new one ----------------------------------------------------------------------------------- + kubectl_bin patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"roles":[\n\t\t\t{\n\t\t\t\t"role": "role-two",\n\t\t\t\t"db": "admin",\n\t\t\t\t"privileges": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"resource": {\n\t\t\t\t\t\t\t"db": "config",\n\t\t\t\t\t\t\t"collection": ""\n\t\t\t\t\t\t},\n\t\t\t\t\t\t"actions": [\n\t\t\t\t\t\t\t"find"\n\t\t\t\t\t\t]\n\t\t\t\t\t}\n\t\t\t\t],\n\t\t\t\t"roles": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"role": "read",\n\t\t\t\t\t\t"db": "admin"\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t}\n\t\t]\n\t}}' ++ mktemp + local LAST_OUT=/tmp/tmp.zNv1b3OoKb ++ mktemp + local LAST_ERR=/tmp/tmp.qmecernLzL + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"roles":[\n\t\t\t{\n\t\t\t\t"role": "role-two",\n\t\t\t\t"db": "admin",\n\t\t\t\t"privileges": 
[\n\t\t\t\t\t{\n\t\t\t\t\t\t"resource": {\n\t\t\t\t\t\t\t"db": "config",\n\t\t\t\t\t\t\t"collection": ""\n\t\t\t\t\t\t},\n\t\t\t\t\t\t"actions": [\n\t\t\t\t\t\t\t"find"\n\t\t\t\t\t\t]\n\t\t\t\t\t}\n\t\t\t\t],\n\t\t\t\t"roles": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"role": "read",\n\t\t\t\t\t\t"db": "admin"\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t}\n\t\t]\n\t}}' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.zNv1b3OoKb perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.qmecernLzL + rm /tmp/tmp.zNv1b3OoKb /tmp/tmp.qmecernLzL + return 0 + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rE4eJipquL +++ mktemp ++ local LAST_ERR=/tmp/tmp.jZb3GXOIkP ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.rE4eJipquL ++ cat /tmp/tmp.jZb3GXOIkP ++ rm /tmp/tmp.rE4eJipquL /tmp/tmp.jZb3GXOIkP ++ return 0 + [[ '' == true ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.N00EWcBgEx +++ mktemp ++ local LAST_ERR=/tmp/tmp.71xMI4EYTi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.N00EWcBgEx ++ cat /tmp/tmp.71xMI4EYTi ++ rm /tmp/tmp.N00EWcBgEx /tmp/tmp.71xMI4EYTi ++ return 0 + [[ '' == true ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LSN7ZEnubu +++ mktemp ++ local LAST_ERR=/tmp/tmp.RIpzqzqs4k ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LSN7ZEnubu ++ cat /tmp/tmp.RIpzqzqs4k ++ rm /tmp/tmp.LSN7ZEnubu /tmp/tmp.RIpzqzqs4k ++ return 0 + [[ '' == true ]] + sleep 10 + [[ true == true ]] + set +x Waiting for cluster readyness ++ get_role_cmd '"role-one"' ++ local 'role="role-one"' ++ cmd=$'(function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' ++ echo $'(function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, 
showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + compare admin $'(function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 role-one + local database=admin + local $'command=(function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local target=role-one + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 mongodb + local $'command=use admin\\n (function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local driver=mongodb + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YRG4xOGipP +++ mktemp ++ local LAST_ERR=/tmp/tmp.9sVf5kPdjj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.YRG4xOGipP ++ cat /tmp/tmp.9sVf5kPdjj ++ rm /tmp/tmp.YRG4xOGipP /tmp/tmp.9sVf5kPdjj ++ return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 
0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.CTFCx0yrZz ++ mktemp + local LAST_ERR=/tmp/tmp.jAirWERwZX + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.CTFCx0yrZz + cat /tmp/tmp.jAirWERwZX + rm /tmp/tmp.CTFCx0yrZz /tmp/tmp.jAirWERwZX + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/role-one.json /tmp/tmp.UfAOPg7iAD/role-one ++ get_role_cmd '"role-two"' ++ local 'role="role-two"' ++ cmd=$'(function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' ++ echo $'(function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + compare admin $'(function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 role-two + local database=admin + local $'command=(function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local target=role-two + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 mongodb + local $'command=use admin\\n (function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif 
(a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Z4J80zsWye +++ mktemp ++ local LAST_ERR=/tmp/tmp.l464Kh6EUG ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Z4J80zsWye ++ cat /tmp/tmp.l464Kh6EUG ++ rm /tmp/tmp.Z4J80zsWye /tmp/tmp.l464Kh6EUG ++ return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.SF1N9sz7Ws ++ mktemp + local LAST_ERR=/tmp/tmp.xDBtqrm3YI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.SF1N9sz7Ws + cat /tmp/tmp.xDBtqrm3YI + rm /tmp/tmp.SF1N9sz7Ws /tmp/tmp.xDBtqrm3YI + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/role-two.json /tmp/tmp.UfAOPg7iAD/role-two + desc 'check role update from CR' + set +o xtrace ----------------------------------------------------------------------------------- check role update from CR ----------------------------------------------------------------------------------- + kubectl_bin patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"roles":[\n\t\t\t{\n\t\t\t\t"role": "role-two",\n\t\t\t\t"db": "admin",\n\t\t\t\t"privileges": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"resource": {\n\t\t\t\t\t\t\t"db": "config",\n\t\t\t\t\t\t\t"collection": ""\n\t\t\t\t\t\t},\n\t\t\t\t\t\t"actions": [\n\t\t\t\t\t\t\t"find"\n\t\t\t\t\t\t]\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t}\n\t\t]\n\t}}' ++ mktemp + local 
LAST_OUT=/tmp/tmp.4wHh0VwMai ++ mktemp + local LAST_ERR=/tmp/tmp.XqKff9hygt + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"roles":[\n\t\t\t{\n\t\t\t\t"role": "role-two",\n\t\t\t\t"db": "admin",\n\t\t\t\t"privileges": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"resource": {\n\t\t\t\t\t\t\t"db": "config",\n\t\t\t\t\t\t\t"collection": ""\n\t\t\t\t\t\t},\n\t\t\t\t\t\t"actions": [\n\t\t\t\t\t\t\t"find"\n\t\t\t\t\t\t]\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t}\n\t\t]\n\t}}' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.4wHh0VwMai perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.XqKff9hygt + rm /tmp/tmp.4wHh0VwMai /tmp/tmp.XqKff9hygt + return 0 + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Zm5gaaLL8J +++ mktemp ++ local LAST_ERR=/tmp/tmp.qoyBbCUz1x ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Zm5gaaLL8J ++ cat /tmp/tmp.qoyBbCUz1x ++ rm /tmp/tmp.Zm5gaaLL8J /tmp/tmp.qoyBbCUz1x ++ return 0 + [[ '' == true ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qtVtoqs86S +++ mktemp ++ local LAST_ERR=/tmp/tmp.gUGo4iSIv9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.qtVtoqs86S ++ cat /tmp/tmp.gUGo4iSIv9 ++ rm /tmp/tmp.qtVtoqs86S /tmp/tmp.gUGo4iSIv9 ++ return 0 + [[ '' == true ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dJfawHDevg +++ mktemp ++ local LAST_ERR=/tmp/tmp.o9gfIzYJh5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dJfawHDevg ++ cat /tmp/tmp.o9gfIzYJh5 ++ rm /tmp/tmp.dJfawHDevg /tmp/tmp.o9gfIzYJh5 ++ return 0 + [[ '' == true ]] + sleep 10 + [[ true == true ]] + set +x Waiting for cluster readyness ++ get_role_cmd '"role-two"' ++ local 'role="role-two"' ++ cmd=$'(function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) 
return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' ++ echo $'(function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + compare admin $'(function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 role-two-updated + local database=admin + local $'command=(function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local target=role-two-updated + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + sed '/"userId"/d' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + run_mongos $'use admin\\n (function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 mongodb + local $'command=use admin\\n (function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.c9gplGBOke +++ mktemp ++ local LAST_ERR=/tmp/tmp.CPkvqMDluo ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.c9gplGBOke ++ cat /tmp/tmp.CPkvqMDluo ++ rm /tmp/tmp.c9gplGBOke /tmp/tmp.CPkvqMDluo ++ return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = 
db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.32sHetgtiP ++ mktemp + local LAST_ERR=/tmp/tmp.NiNJTonkiN + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.32sHetgtiP + cat /tmp/tmp.NiNJTonkiN + rm /tmp/tmp.32sHetgtiP /tmp/tmp.NiNJTonkiN + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/role-two-updated.json /tmp/tmp.UfAOPg7iAD/role-two-updated + desc 'check role update from DB' + set +o xtrace ----------------------------------------------------------------------------------- check role update from DB ----------------------------------------------------------------------------------- + run_mongos 'use admin\n db.updateRole( "role-two",{privileges:[{resource: {db:"config", collection:"" }, actions: ["find", "update"]}]})' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local 'command=use admin\n db.updateRole( "role-two",{privileges:[{resource: {db:"config", collection:"" }, actions: ["find", "update"]}]})' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ExPlgCEUe8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.nqIM11slBp ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ExPlgCEUe8 ++ cat /tmp/tmp.nqIM11slBp ++ rm /tmp/tmp.ExPlgCEUe8 /tmp/tmp.nqIM11slBp ++ return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c 'printf '\''use admin\n db.updateRole( "role-two",{privileges:[{resource: {db:"config", collection:"" }, actions: ["find", "update"]}]})\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.Jv0CHB9Jro ++ mktemp + local LAST_ERR=/tmp/tmp.krMjvujmTN + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c 'printf '\''use admin\n 
db.updateRole( "role-two",{privileges:[{resource: {db:"config", collection:"" }, actions: ["find", "update"]}]})\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Jv0CHB9Jro Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("34245d56-4fe2-49eb-852c-08a3917fe3ff") } Percona Server for MongoDB server version: v8.0.20-8 WARNING: shell and server versions do not match switched to db admin bye + cat /tmp/tmp.krMjvujmTN + rm /tmp/tmp.Jv0CHB9Jro /tmp/tmp.krMjvujmTN + return 0 + sleep 15 ++ get_role_cmd '"role-two"' ++ local 'role="role-two"' ++ cmd=$'(function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' ++ echo $'(function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + compare admin $'(function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 role-two-updated + local database=admin + local $'command=(function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local target=role-two-updated + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongos $'use admin\\n (function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 mongodb + sed '/"userId"/d' + local $'command=use admin\\n (function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 
0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oPN5O60NHw +++ mktemp ++ local LAST_ERR=/tmp/tmp.tA8Wl1uMWC ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.oPN5O60NHw ++ cat /tmp/tmp.tA8Wl1uMWC ++ rm /tmp/tmp.oPN5O60NHw /tmp/tmp.tA8Wl1uMWC ++ return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.6QpAMjMD6d ++ mktemp + local LAST_ERR=/tmp/tmp.JkqAjPlwxQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6QpAMjMD6d + cat /tmp/tmp.JkqAjPlwxQ + rm /tmp/tmp.6QpAMjMD6d /tmp/tmp.JkqAjPlwxQ + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/role-two-updated.json /tmp/tmp.UfAOPg7iAD/role-two-updated + desc 'check new role created after updated role name via CR' + set +o xtrace ----------------------------------------------------------------------------------- check new role created after updated role name via CR ----------------------------------------------------------------------------------- + kubectl_bin patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"roles":[\n\t\t\t{\n\t\t\t\t"role": "role-three",\n\t\t\t\t"db": "admin",\n\t\t\t\t"privileges": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"resource": {\n\t\t\t\t\t\t\t"db": "config",\n\t\t\t\t\t\t\t"collection": ""\n\t\t\t\t\t\t},\n\t\t\t\t\t\t"actions": [\n\t\t\t\t\t\t\t"find"\n\t\t\t\t\t\t]\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t}\n\t\t]\n\t}}' ++ mktemp + local LAST_OUT=/tmp/tmp.hDBybnT1t7 ++ mktemp + local LAST_ERR=/tmp/tmp.y2s5r1lIwr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"roles":[\n\t\t\t{\n\t\t\t\t"role": "role-three",\n\t\t\t\t"db": 
"admin",\n\t\t\t\t"privileges": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"resource": {\n\t\t\t\t\t\t\t"db": "config",\n\t\t\t\t\t\t\t"collection": ""\n\t\t\t\t\t\t},\n\t\t\t\t\t\t"actions": [\n\t\t\t\t\t\t\t"find"\n\t\t\t\t\t\t]\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t}\n\t\t]\n\t}}' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.hDBybnT1t7 perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.y2s5r1lIwr + rm /tmp/tmp.hDBybnT1t7 /tmp/tmp.y2s5r1lIwr + return 0 + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.F2psgE1Qpe +++ mktemp ++ local LAST_ERR=/tmp/tmp.AuAj9byAqi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.F2psgE1Qpe ++ cat /tmp/tmp.AuAj9byAqi ++ rm /tmp/tmp.F2psgE1Qpe /tmp/tmp.AuAj9byAqi ++ return 0 + [[ '' == true ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PggtOejzjt +++ mktemp ++ local LAST_ERR=/tmp/tmp.AP2EbBhRpO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.PggtOejzjt ++ cat /tmp/tmp.AP2EbBhRpO ++ rm /tmp/tmp.PggtOejzjt /tmp/tmp.AP2EbBhRpO ++ return 0 + [[ '' == true ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NOOBNOW37n +++ mktemp ++ local LAST_ERR=/tmp/tmp.XqdglQZ5Zt ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.NOOBNOW37n ++ cat /tmp/tmp.XqdglQZ5Zt ++ rm /tmp/tmp.NOOBNOW37n /tmp/tmp.XqdglQZ5Zt ++ return 0 + [[ '' == true ]] + sleep 10 + [[ true == true ]] + set +x Waiting for cluster readyness ++ get_role_cmd '"role-three"' ++ local 'role="role-three"' ++ cmd=$'(function() {\n\tvar role = db.getRole("role-three", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' ++ echo $'(function() {\n\tvar role = db.getRole("role-three", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = 
role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + compare admin $'(function() {\n\tvar role = db.getRole("role-three", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 role-three + local database=admin + local $'command=(function() {\n\tvar role = db.getRole("role-three", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local target=role-three + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar role = db.getRole("role-three", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 mongodb + local $'command=use admin\\n (function() {\n\tvar role = db.getRole("role-three", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lFTYb7JaYv +++ mktemp ++ local LAST_ERR=/tmp/tmp.Cv0unJqDh9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.lFTYb7JaYv ++ cat /tmp/tmp.Cv0unJqDh9 ++ rm /tmp/tmp.lFTYb7JaYv /tmp/tmp.Cv0unJqDh9 ++ return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = db.getRole("role-three", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = 
roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.D6I1IEBGD6 ++ mktemp + local LAST_ERR=/tmp/tmp.e7WQ3ZCnjv + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = db.getRole("role-three", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.D6I1IEBGD6 + cat /tmp/tmp.e7WQ3ZCnjv + rm /tmp/tmp.D6I1IEBGD6 /tmp/tmp.e7WQ3ZCnjv + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/role-three.json /tmp/tmp.UfAOPg7iAD/role-three + desc 'check creating multiple roles and the users in a single CR apply' + set +o xtrace ----------------------------------------------------------------------------------- check creating multiple roles and the users in a single CR apply ----------------------------------------------------------------------------------- + kubectl_bin patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {\n\t\t\t"roles": [\n\t\t\t\t{\n\t\t\t\t"role": "role-four",\n\t\t\t\t"db": "testAdmin1",\n\t\t\t\t"privileges": [\n\t\t\t\t\t{\n\t\t\t\t\t"resource": {\n\t\t\t\t\t\t"db": "testAdmin1",\n\t\t\t\t\t\t"collection": ""\n\t\t\t\t\t},\n\t\t\t\t\t"actions": [\n\t\t\t\t\t\t"find",\n\t\t\t\t\t\t"listIndexes",\n\t\t\t\t\t\t"listCollections"\n\t\t\t\t\t]\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t"resource": {\n\t\t\t\t\t\t"db": "testAdmin1",\n\t\t\t\t\t\t"collection": "system.profile"\n\t\t\t\t\t},\n\t\t\t\t\t"actions": [\n\t\t\t\t\t\t"dbStats",\n\t\t\t\t\t\t"collStats",\n\t\t\t\t\t\t"indexStats"\n\t\t\t\t\t]\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t"resource": {\n\t\t\t\t\t\t"db": "testAdmin1",\n\t\t\t\t\t\t"collection": "system.version"\n\t\t\t\t\t},\n\t\t\t\t\t"actions": [\n\t\t\t\t\t\t"find"\n\t\t\t\t\t]\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t"role": "role-five",\n\t\t\t\t"db": "testAdmin2",\n\t\t\t\t"privileges": [\n\t\t\t\t\t{\n\t\t\t\t\t"resource": {\n\t\t\t\t\t\t"db": "testAdmin2",\n\t\t\t\t\t\t"collection": ""\n\t\t\t\t\t},\n\t\t\t\t\t"actions": [\n\t\t\t\t\t\t"find",\n\t\t\t\t\t\t"listIndexes",\n\t\t\t\t\t\t"listCollections"\n\t\t\t\t\t]\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t"resource": {\n\t\t\t\t\t\t"db": "testAdmin2",\n\t\t\t\t\t\t"collection": "system.profile"\n\t\t\t\t\t},\n\t\t\t\t\t"actions": [\n\t\t\t\t\t\t"dbStats",\n\t\t\t\t\t\t"collStats",\n\t\t\t\t\t\t"indexStats"\n\t\t\t\t\t]\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t"resource": {\n\t\t\t\t\t\t"db": "testAdmin2",\n\t\t\t\t\t\t"collection": "system.version"\n\t\t\t\t\t},\n\t\t\t\t\t"actions": [\n\t\t\t\t\t\t"find"\n\t\t\t\t\t]\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t\t}\n\t\t\t],\n\t\t\t"users": [\n\t\t\t\t{\n\t\t\t\t"name": "user-five",\n\t\t\t\t"db": "testAdmin",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-one",\n\t\t\t\t\t"key": "userOnePassKey"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"name": "role-four",\n\t\t\t\t\t\t"db": 
"testAdmin1"\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\t"name": "role-five",\n\t\t\t\t\t\t"db": "testAdmin2"\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t"name": "user-six",\n\t\t\t\t"db": "testAdmin",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-one",\n\t\t\t\t\t"key": "userOnePassKey"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{\n\t\t\t\t\t"name": "role-five",\n\t\t\t\t\t"db": "testAdmin2"\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t\t}\n\t\t\t]\n\t}}' ++ mktemp + local LAST_OUT=/tmp/tmp.7GcXLUA0rl ++ mktemp + local LAST_ERR=/tmp/tmp.sgL24OymMV + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {\n\t\t\t"roles": [\n\t\t\t\t{\n\t\t\t\t"role": "role-four",\n\t\t\t\t"db": "testAdmin1",\n\t\t\t\t"privileges": [\n\t\t\t\t\t{\n\t\t\t\t\t"resource": {\n\t\t\t\t\t\t"db": "testAdmin1",\n\t\t\t\t\t\t"collection": ""\n\t\t\t\t\t},\n\t\t\t\t\t"actions": [\n\t\t\t\t\t\t"find",\n\t\t\t\t\t\t"listIndexes",\n\t\t\t\t\t\t"listCollections"\n\t\t\t\t\t]\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t"resource": {\n\t\t\t\t\t\t"db": "testAdmin1",\n\t\t\t\t\t\t"collection": "system.profile"\n\t\t\t\t\t},\n\t\t\t\t\t"actions": [\n\t\t\t\t\t\t"dbStats",\n\t\t\t\t\t\t"collStats",\n\t\t\t\t\t\t"indexStats"\n\t\t\t\t\t]\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t"resource": {\n\t\t\t\t\t\t"db": "testAdmin1",\n\t\t\t\t\t\t"collection": "system.version"\n\t\t\t\t\t},\n\t\t\t\t\t"actions": [\n\t\t\t\t\t\t"find"\n\t\t\t\t\t]\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t"role": "role-five",\n\t\t\t\t"db": "testAdmin2",\n\t\t\t\t"privileges": [\n\t\t\t\t\t{\n\t\t\t\t\t"resource": {\n\t\t\t\t\t\t"db": "testAdmin2",\n\t\t\t\t\t\t"collection": ""\n\t\t\t\t\t},\n\t\t\t\t\t"actions": [\n\t\t\t\t\t\t"find",\n\t\t\t\t\t\t"listIndexes",\n\t\t\t\t\t\t"listCollections"\n\t\t\t\t\t]\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t"resource": {\n\t\t\t\t\t\t"db": "testAdmin2",\n\t\t\t\t\t\t"collection": "system.profile"\n\t\t\t\t\t},\n\t\t\t\t\t"actions": [\n\t\t\t\t\t\t"dbStats",\n\t\t\t\t\t\t"collStats",\n\t\t\t\t\t\t"indexStats"\n\t\t\t\t\t]\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t"resource": {\n\t\t\t\t\t\t"db": "testAdmin2",\n\t\t\t\t\t\t"collection": "system.version"\n\t\t\t\t\t},\n\t\t\t\t\t"actions": [\n\t\t\t\t\t\t"find"\n\t\t\t\t\t]\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t\t}\n\t\t\t],\n\t\t\t"users": [\n\t\t\t\t{\n\t\t\t\t"name": "user-five",\n\t\t\t\t"db": "testAdmin",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-one",\n\t\t\t\t\t"key": "userOnePassKey"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"name": "role-four",\n\t\t\t\t\t\t"db": "testAdmin1"\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\t"name": "role-five",\n\t\t\t\t\t\t"db": "testAdmin2"\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t"name": "user-six",\n\t\t\t\t"db": "testAdmin",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-one",\n\t\t\t\t\t"key": "userOnePassKey"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{\n\t\t\t\t\t"name": "role-five",\n\t\t\t\t\t"db": "testAdmin2"\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t\t}\n\t\t\t]\n\t}}' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.7GcXLUA0rl perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.sgL24OymMV + rm /tmp/tmp.7GcXLUA0rl /tmp/tmp.sgL24OymMV + return 0 + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local 
cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Lpr7Cvamju +++ mktemp ++ local LAST_ERR=/tmp/tmp.eDHh0hglk4 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Lpr7Cvamju ++ cat /tmp/tmp.eDHh0hglk4 ++ rm /tmp/tmp.Lpr7Cvamju /tmp/tmp.eDHh0hglk4 ++ return 0 + [[ '' == true ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.g03IrEkmqx +++ mktemp ++ local LAST_ERR=/tmp/tmp.lelnB9JdLc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.g03IrEkmqx ++ cat /tmp/tmp.lelnB9JdLc ++ rm /tmp/tmp.g03IrEkmqx /tmp/tmp.lelnB9JdLc ++ return 0 + [[ '' == true ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TSa9NuNYEb +++ mktemp ++ local LAST_ERR=/tmp/tmp.51duhnWqxL ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.TSa9NuNYEb ++ cat /tmp/tmp.51duhnWqxL ++ rm /tmp/tmp.TSa9NuNYEb /tmp/tmp.51duhnWqxL ++ return 0 + [[ '' == true ]] + sleep 10 + [[ true == true ]] + set +x Waiting for cluster readyness ++ get_role_cmd '"role-four"' ++ local 'role="role-four"' ++ cmd=$'(function() {\n\tvar role = db.getRole("role-four", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' ++ echo $'(function() {\n\tvar role = db.getRole("role-four", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + compare testAdmin1 $'(function() {\n\tvar role = db.getRole("role-four", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 role-four + local database=testAdmin1 + local $'command=(function() {\n\tvar role = 
db.getRole("role-four", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local target=role-four + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use testAdmin1\\n (function() {\n\tvar role = db.getRole("role-four", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 mongodb + local $'command=use testAdmin1\\n (function() {\n\tvar role = db.getRole("role-four", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + sed '/"userId"/d' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ echo .svc.cluster.local + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4xmlQnWrhk +++ mktemp ++ local LAST_ERR=/tmp/tmp.qzyzBzEHps ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.4xmlQnWrhk ++ cat /tmp/tmp.qzyzBzEHps ++ rm /tmp/tmp.4xmlQnWrhk /tmp/tmp.qzyzBzEHps ++ return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use testAdmin1\\n (function() {\n\tvar role = db.getRole("role-four", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.PkoKloGFYA ++ mktemp + local LAST_ERR=/tmp/tmp.s8tPNECXvF + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use testAdmin1\\n (function() {\n\tvar role = db.getRole("role-four", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 
0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.PkoKloGFYA + cat /tmp/tmp.s8tPNECXvF + rm /tmp/tmp.PkoKloGFYA /tmp/tmp.s8tPNECXvF + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/role-four.json /tmp/tmp.UfAOPg7iAD/role-four ++ get_role_cmd '"role-five"' ++ local 'role="role-five"' ++ cmd=$'(function() {\n\tvar role = db.getRole("role-five", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' ++ echo $'(function() {\n\tvar role = db.getRole("role-five", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + compare testAdmin2 $'(function() {\n\tvar role = db.getRole("role-five", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 role-five + local database=testAdmin2 + local $'command=(function() {\n\tvar role = db.getRole("role-five", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local target=role-five + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use testAdmin2\\n (function() {\n\tvar role = db.getRole("role-five", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 mongodb + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + local $'command=use testAdmin2\\n (function() {\n\tvar role = db.getRole("role-five", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' 
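For reference, every mongo shell response in this test is normalized before being diffed against the golden files under e2e-tests/custom-users-roles-sharded/compare/: the injected JS closure sorts the roles array so getRole/getUser output is order-stable, and the recurring filter trio in the trace drops shell noise with grep -E -v, deletes the volatile "userId" field, and scrubs ObjectId values plus the numbered namespace suffix. A minimal bash sketch of that filter chain, assuming a wrapper name normalize_output that is illustrative and not part of the test suite:

# Sketch of the normalization pipeline the trace shows inline
# (the trace invokes /usr/sbin/sed -re; sed -E is equivalent here).
normalize_output() {
	grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' \
		| sed '/"userId"/d' \
		| sed -E 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/'
}
# Hypothetical usage: run_mongos "$cmd" "$uri" | normalize_output > "$tmp_dir/role-five"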
+ suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WjCxnIuSGJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.P56QM24T01 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.WjCxnIuSGJ ++ cat /tmp/tmp.P56QM24T01 ++ rm /tmp/tmp.WjCxnIuSGJ /tmp/tmp.P56QM24T01 ++ return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use testAdmin2\\n (function() {\n\tvar role = db.getRole("role-five", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.CJanDQrS4p ++ mktemp + local LAST_ERR=/tmp/tmp.6ql56aeN3M + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use testAdmin2\\n (function() {\n\tvar role = db.getRole("role-five", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.CJanDQrS4p + cat /tmp/tmp.6ql56aeN3M + rm /tmp/tmp.CJanDQrS4p /tmp/tmp.6ql56aeN3M + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/role-five.json /tmp/tmp.UfAOPg7iAD/role-five ++ get_user_cmd '"user-five"' ++ local 'user="user-five"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-five");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-five");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare testAdmin $'(function() {\n\tvar user = db.getUser("user-five");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 user-five + local database=testAdmin + local $'command=(function() {\n\tvar user = db.getUser("user-five");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + 
local target=user-five + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + run_mongos $'use testAdmin\\n (function() {\n\tvar user = db.getUser("user-five");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 mongodb + sed '/"userId"/d' + local $'command=use testAdmin\\n (function() {\n\tvar user = db.getUser("user-five");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HCUqgHzcac +++ mktemp ++ local LAST_ERR=/tmp/tmp.F4nOqIzPDJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.HCUqgHzcac ++ cat /tmp/tmp.F4nOqIzPDJ ++ rm /tmp/tmp.HCUqgHzcac /tmp/tmp.F4nOqIzPDJ ++ return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use testAdmin\\n (function() {\n\tvar user = db.getUser("user-five");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.3lrr6SdudG ++ mktemp + local LAST_ERR=/tmp/tmp.FLqRrtQ185 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use testAdmin\\n (function() {\n\tvar user = db.getUser("user-five");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.3lrr6SdudG + cat /tmp/tmp.FLqRrtQ185 + rm /tmp/tmp.3lrr6SdudG /tmp/tmp.FLqRrtQ185 + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/user-five.json /tmp/tmp.UfAOPg7iAD/user-five ++ get_user_cmd '"user-six"' ++ local 'user="user-six"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-six");\n\tvar roles = 
user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-six");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare testAdmin $'(function() {\n\tvar user = db.getUser("user-six");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 user-six + local database=testAdmin + local $'command=(function() {\n\tvar user = db.getUser("user-six");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local target=user-six + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongos $'use testAdmin\\n (function() {\n\tvar user = db.getUser("user-six");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 mongodb + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + local $'command=use testAdmin\\n (function() {\n\tvar user = db.getUser("user-six");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DREtF17sXm +++ mktemp ++ local LAST_ERR=/tmp/tmp.NkHeYxtged ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.DREtF17sXm ++ cat /tmp/tmp.NkHeYxtged ++ rm /tmp/tmp.DREtF17sXm /tmp/tmp.NkHeYxtged ++ return 0 + local client_container=psmdb-client-bb8b97679-ftcxh + kubectl_bin exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use testAdmin\\n (function() {\n\tvar user = db.getUser("user-six");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo 
mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.7dBUxGNZ2K ++ mktemp + local LAST_ERR=/tmp/tmp.hKuOh5akeg + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-ftcxh -- bash -c $'printf \'use testAdmin\\n (function() {\n\tvar user = db.getUser("user-six");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-14882.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.7dBUxGNZ2K + cat /tmp/tmp.hKuOh5akeg + rm /tmp/tmp.7dBUxGNZ2K /tmp/tmp.hKuOh5akeg + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/e2e-tests/custom-users-roles-sharded/compare/user-six.json /tmp/tmp.UfAOPg7iAD/user-six + destroy custom-users-roles-sharded-14882 + local namespace=custom-users-roles-sharded-14882 + local ignore_logs=true + [[ 0 == 1 ]] + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false ']' + delete_backups + desc 'Delete psmdb-backup' + set +o xtrace ----------------------------------------------------------------------------------- Delete psmdb-backup ----------------------------------------------------------------------------------- ++ kubectl_bin get psmdb-backup --no-headers ++ wc -l +++ mktemp ++ local LAST_OUT=/tmp/tmp.LdebzkGUt8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.UcEbMYfsnP ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup --no-headers ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LdebzkGUt8 ++ cat /tmp/tmp.UcEbMYfsnP No resources found in custom-users-roles-sharded-14882 namespace. 
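For reference, the mktemp/LAST_OUT/LAST_ERR pattern that dominates this trace is the kubectl_bin retry wrapper: it captures stdout and stderr of a plain kubectl call to temp files, retries up to three times (seq 0 2), then replays both streams and removes the files, so a transient API failure gets another chance before the test gives up. A minimal sketch reconstructed from the trace; the back-off between attempts is an assumption, since the trace only records timeout=4:

kubectl_bin() {
	local LAST_OUT LAST_ERR exit_status=0 timeout=4 i
	LAST_OUT=$(mktemp)
	LAST_ERR=$(mktemp)
	for i in $(seq 0 2); do
		set +e
		kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"   # capture both streams
		exit_status=$?
		set -e
		[ "$exit_status" -eq 0 ] && break          # success: stop retrying
		sleep "$timeout"                           # assumed back-off between attempts
	done
	cat "$LAST_OUT"        # replay captured stdout, as the trace does
	cat "$LAST_ERR" >&2    # replay captured stderr
	rm -f "$LAST_OUT" "$LAST_ERR"
	return "$exit_status"
}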
++ rm /tmp/tmp.LdebzkGUt8 /tmp/tmp.UcEbMYfsnP ++ return 0 + '[' 0 '!=' 0 ']' + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.Fqtx30DEUv ++ mktemp + local LAST_ERR=/tmp/tmp.nVyFD2ks8K + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Fqtx30DEUv customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.nVyFD2ks8K + rm /tmp/tmp.Fqtx30DEUv /tmp/tmp.nVyFD2ks8K + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/deploy/crd.yaml ++ grep -v '\-\-\-' grep: warning: stray \ before - grep: warning: stray \ before - + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.flL7zCrpdc ++ mktemp + local LAST_ERR=/tmp/tmp.j5Go9MrFnM + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.flL7zCrpdc + cat /tmp/tmp.j5Go9MrFnM + rm /tmp/tmp.flL7zCrpdc /tmp/tmp.j5Go9MrFnM + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.QE2zEw0nYb ++ mktemp + local LAST_ERR=/tmp/tmp.9f7nnDawgd + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + 
break + cat /tmp/tmp.QE2zEw0nYb + cat /tmp/tmp.9f7nnDawgd + rm /tmp/tmp.QE2zEw0nYb /tmp/tmp.9f7nnDawgd + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + grep -v NAMESPACE + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.9Kt2BCU5C3 ++ mktemp + local LAST_ERR=/tmp/tmp.LM2U496a2L + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9Kt2BCU5C3 + cat /tmp/tmp.LM2U496a2L + rm /tmp/tmp.9Kt2BCU5C3 /tmp/tmp.LM2U496a2L + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.2a1kYsCFIr ++ mktemp + local LAST_ERR=/tmp/tmp.3Qxhqy7rGv + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2283/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.2a1kYsCFIr clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.3Qxhqy7rGv + rm /tmp/tmp.2a1kYsCFIr /tmp/tmp.3Qxhqy7rGv + return 0 + destroy_cert_manager + kubectl_bin delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.2U9B7PMXAj ++ mktemp + local LAST_ERR=/tmp/tmp.RNZqWIkLu7 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.2U9B7PMXAj + cat /tmp/tmp.RNZqWIkLu7 Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.2U9B7PMXAj + cat /tmp/tmp.RNZqWIkLu7 Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from 
server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error 
when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 4 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.2U9B7PMXAj + cat 
+ sleep 8 + cat /tmp/tmp.2U9B7PMXAj + cat /tmp/tmp.RNZqWIkLu7
"cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + rm /tmp/tmp.2U9B7PMXAj /tmp/tmp.RNZqWIkLu7 + return 1 + true + '[' -n '' ']' + '[' -n psmdb-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace custom-users-roles-sharded-14882 + rm -rf /tmp/tmp.UfAOPg7iAD + kubectl_bin delete --grace-period=0 --force=true namespace psmdb-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.P19L6fXZ9F + local LAST_OUT=/tmp/tmp.r4L4VtCtN0 ++ mktemp ++ mktemp + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_ERR=/tmp/tmp.eRCCU9hybl + local exit_status=0 + local LAST_ERR=/tmp/tmp.l0Ivkvp3u9 + local timeout=4 + local exit_status=0 + local timeout=4 ++ seq 0 2 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete --grace-period=0 --force=true namespace psmdb-operator + for i in $(seq 0 2) + set +e + kubectl delete --grace-period=0 --force=true namespace custom-users-roles-sharded-14882