Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/logs/custom-users-roles-sharded.log Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1 + cluster=some-name + create_infra custom-users-roles-sharded-27922 + local ns=custom-users-roles-sharded-27922 + [[ 1 == 1 ]] + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.mEMZgRSlTO ++ mktemp + local LAST_ERR=/tmp/tmp.5mS1tXux6J + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.mEMZgRSlTO customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.5mS1tXux6J + rm /tmp/tmp.mEMZgRSlTO /tmp/tmp.5mS1tXux6J + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/deploy/crd.yaml ++ grep -v '\-\-\-' grep: warning: stray \ before - grep: warning: stray \ before - + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.SFEH6457FR ++ mktemp + local LAST_ERR=/tmp/tmp.pJf13gJ3sA + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.SFEH6457FR + cat /tmp/tmp.pJf13gJ3sA + rm /tmp/tmp.SFEH6457FR /tmp/tmp.pJf13gJ3sA + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p 
'{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.LvTEaKpYFR ++ mktemp + local LAST_ERR=/tmp/tmp.cNmdJET28V + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LvTEaKpYFR + cat /tmp/tmp.cNmdJET28V + rm /tmp/tmp.LvTEaKpYFR /tmp/tmp.cNmdJET28V + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.MylYJ3gAKN ++ mktemp + local LAST_ERR=/tmp/tmp.37lRu5xgdg + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.MylYJ3gAKN + cat /tmp/tmp.37lRu5xgdg + rm /tmp/tmp.MylYJ3gAKN /tmp/tmp.37lRu5xgdg + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.yROrGoXbs7 ++ mktemp + local LAST_ERR=/tmp/tmp.d7witxLF41 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.yROrGoXbs7 clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.d7witxLF41 + rm /tmp/tmp.yROrGoXbs7 /tmp/tmp.d7witxLF41 + return 0 + check_crd_for_deletion PR-2287-fc474e65 + local git_tag=PR-2287-fc474e65 ++ curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/PR-2287-fc474e65/deploy/crd.yaml ++ /usr/sbin/sed s/---//g ++ yq eval .metadata.name ++ /usr/sbin/sed ':a;N;$!ba;s/\n/ /g' + for crd_name in $(curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml | yq eval '.metadata.name' | $sed 's/---//g' | $sed ':a;N;$!ba;s/\n/ /g') ++ kubectl_bin get crd/null -o 'jsonpath={.status.conditions[-1].type}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cPf4Yb14uZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.yH8YP6ZiPa ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.cPf4Yb14uZ ++ cat /tmp/tmp.yH8YP6ZiPa Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ 
sleep 0 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.cPf4Yb14uZ ++ cat /tmp/tmp.yH8YP6ZiPa Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 4 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.cPf4Yb14uZ ++ cat /tmp/tmp.yH8YP6ZiPa Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 8 ++ cat /tmp/tmp.cPf4Yb14uZ ++ cat /tmp/tmp.yH8YP6ZiPa Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ rm /tmp/tmp.cPf4Yb14uZ /tmp/tmp.yH8YP6ZiPa ++ return 1 + [[ '' == Terminating ]] + '[' -n psmdb-operator ']' + create_namespace psmdb-operator + local namespace=psmdb-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + grep -E -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' ++ mktemp + xargs kubectl delete ns + local LAST_OUT=/tmp/tmp.s9exEdiOah ++ mktemp + '[' -n '' ']' + desc 'cleaned up old namespaces psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace psmdb-operator --ignore-not-found ++ mktemp + local LAST_ERR=/tmp/tmp.2cRhAvRBno + local exit_status=0 + local timeout=4 ++ seq 0 2 + local 
LAST_OUT=/tmp/tmp.UIQzAV0c84 ++ mktemp + for i in $(seq 0 2) + set +e + kubectl get ns + local LAST_ERR=/tmp/tmp.Wehm8PJoQw + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete namespace psmdb-operator --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.s9exEdiOah + cat /tmp/tmp.2cRhAvRBno + rm /tmp/tmp.s9exEdiOah /tmp/tmp.2cRhAvRBno + return 0 namespace "custom-users-roles-sharded-21978" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.UIQzAV0c84 namespace "psmdb-operator" deleted + cat /tmp/tmp.Wehm8PJoQw + rm /tmp/tmp.UIQzAV0c84 /tmp/tmp.Wehm8PJoQw + return 0 + kubectl_bin wait --for=delete namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.PGdqxLi5To ++ mktemp + local LAST_ERR=/tmp/tmp.h8SN2BkN2t + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.PGdqxLi5To + cat /tmp/tmp.h8SN2BkN2t + rm /tmp/tmp.PGdqxLi5To /tmp/tmp.h8SN2BkN2t + return 0 + desc 'create namespace psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.g06um0WFdt ++ mktemp + local LAST_ERR=/tmp/tmp.NfqWl4lEwe + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.g06um0WFdt namespace/psmdb-operator created + cat /tmp/tmp.NfqWl4lEwe + rm /tmp/tmp.g06um0WFdt /tmp/tmp.NfqWl4lEwe + return 0 + set_kube_ctx psmdb-operator + local namespace=psmdb-operator ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.pRwrUG57Pm +++ mktemp ++ local LAST_ERR=/tmp/tmp.DfBS2SDBcX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.pRwrUG57Pm ++ cat /tmp/tmp.DfBS2SDBcX ++ rm /tmp/tmp.pRwrUG57Pm /tmp/tmp.DfBS2SDBcX ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2287-fc474e65-5-cluster7 --namespace=psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.B9lUq3IB5a ++ mktemp + local LAST_ERR=/tmp/tmp.RgsQyBXn69 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2287-fc474e65-5-cluster7 --namespace=psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.B9lUq3IB5a Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-2287-fc474e65-5-cluster7" modified. 
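
Note on the trace format: every kubectl call in this log runs through the test suite's kubectl_bin wrapper, which is why each invocation is bracketed by the same mktemp / LAST_OUT / LAST_ERR / seq 0 2 boilerplate. The wrapper's body never appears in the log, but its shape can be read off the expanded statements: up to three attempts, stdout and stderr buffered into temp files, a 0s/4s/8s backoff between failed attempts (the sleep 0 / sleep 4 / sleep 8 lines in the crd/null probe above), and the buffers replayed and removed before returning the last exit status. A minimal sketch reconstructed from the trace, not the canonical e2e-tests source:

    kubectl_bin() {
        local LAST_OUT LAST_ERR
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        local exit_status=0
        local timeout=4
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" != 0 ]; then
                # failed attempt: surface the buffered output, back off, retry
                cat "$LAST_OUT"
                cat "$LAST_ERR"
                sleep $((timeout * i))   # 0s, then 4s, then 8s, as seen above
            else
                break
            fi
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR"
        rm -f "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }

This is also why every successful call in the trace ends with a pair of cat lines, an rm of the two temp files, and return 0.
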
+ cat /tmp/tmp.RgsQyBXn69 + rm /tmp/tmp.B9lUq3IB5a /tmp/tmp.RgsQyBXn69 + return 0 + deploy_operator + desc 'start PSMDB operator: docker.io/perconalab/percona-server-mongodb-operator:PR-2287-fc474e65' + set +o xtrace ----------------------------------------------------------------------------------- start PSMDB operator: docker.io/perconalab/percona-server-mongodb-operator:PR-2287-fc474e65 ----------------------------------------------------------------------------------- + local cr_file + '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/conf/crd.yaml ']' + cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/deploy/crd.yaml + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.1RmpK1HGFB ++ mktemp + local LAST_ERR=/tmp/tmp.rTCq1YNjRS + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.1RmpK1HGFB customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied + cat /tmp/tmp.rTCq1YNjRS + rm /tmp/tmp.1RmpK1HGFB /tmp/tmp.rTCq1YNjRS + return 0 + '[' -n psmdb-operator ']' + apply_rbac cw-rbac + local operator_namespace=psmdb-operator + local rbac=cw-rbac + sed -e 's^namespace: .*^namespace: psmdb-operator^' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/deploy/cw-rbac.yaml + kubectl_bin apply -n psmdb-operator -f - ++ mktemp + local LAST_OUT=/tmp/tmp.S0XmypU0hQ ++ mktemp + local LAST_ERR=/tmp/tmp.5eosoiuAEr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.S0XmypU0hQ clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created + cat /tmp/tmp.5eosoiuAEr + rm /tmp/tmp.S0XmypU0hQ /tmp/tmp.5eosoiuAEr + return 0 + kubectl_bin apply -n psmdb-operator -f - + yq eval $'\n\t\t\t(.spec.template.spec.containers[].image = "docker.io/perconalab/percona-server-mongodb-operator:PR-2287-fc474e65") |\n\t\t\t((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") |\n\t\t\t((.. 
| select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/deploy/cw-operator.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.YgGGHz0eIE ++ mktemp + local LAST_ERR=/tmp/tmp.cLKr2HDEEg + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.YgGGHz0eIE deployment.apps/percona-server-mongodb-operator created + cat /tmp/tmp.cLKr2HDEEg + rm /tmp/tmp.YgGGHz0eIE /tmp/tmp.cLKr2HDEEg + return 0 + sleep 20 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.pFjW6ODsfy +++ mktemp ++ local LAST_ERR=/tmp/tmp.J7eTe71TuI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.pFjW6ODsfy ++ cat /tmp/tmp.J7eTe71TuI ++ rm /tmp/tmp.pFjW6ODsfy /tmp/tmp.J7eTe71TuI ++ return 0 + wait_operator_pod percona-server-mongodb-operator-6895f7d696-zs6g5 + local pod=percona-server-mongodb-operator-6895f7d696-zs6g5 + set +o xtrace waiting for pod/percona-server-mongodb-operator-6895f7d696-zs6g5 to be ready.OK + echo 'Print operator info from log' Print operator info from log + grep 'Manager starting up' ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.XzOWr4ky9y +++ mktemp ++ local LAST_ERR=/tmp/tmp.1jKAO6xT4d ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.XzOWr4ky9y ++ cat /tmp/tmp.1jKAO6xT4d ++ rm /tmp/tmp.XzOWr4ky9y /tmp/tmp.1jKAO6xT4d ++ return 0 + kubectl_bin logs -n psmdb-operator percona-server-mongodb-operator-6895f7d696-zs6g5 ++ mktemp + local LAST_OUT=/tmp/tmp.nxEFKtMT5h ++ mktemp + local LAST_ERR=/tmp/tmp.3CJWf4Cl9e + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl logs -n psmdb-operator percona-server-mongodb-operator-6895f7d696-zs6g5 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.nxEFKtMT5h + cat /tmp/tmp.3CJWf4Cl9e + rm /tmp/tmp.nxEFKtMT5h /tmp/tmp.3CJWf4Cl9e + return 0 2026-04-09T11:08:47.475Z INFO setup Manager starting up {"gitCommit": "fc474e65109342c13c34c3ce7546452f0d6ad19d", "gitBranch": "PR-2287-fc474e65", "buildTime": "", "goVersion": "go1.25.9", "os": "linux", "arch": "amd64"} + create_namespace custom-users-roles-sharded-27922 + local namespace=custom-users-roles-sharded-27922 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + 
timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + grep -E -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' ++ mktemp + '[' -n '' ']' + desc 'cleaned up old namespaces custom-users-roles-sharded-27922' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces custom-users-roles-sharded-27922 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace custom-users-roles-sharded-27922 --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.qUAO8D2S4T ++ mktemp + local LAST_OUT=/tmp/tmp.tABDFfePvt + local LAST_ERR=/tmp/tmp.wcQsLfjqtG + local exit_status=0 + local timeout=4 ++ seq 0 2 ++ mktemp + for i in $(seq 0 2) + set +e + kubectl get ns + local LAST_ERR=/tmp/tmp.ZGQ8nmZ3wX + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete namespace custom-users-roles-sharded-27922 --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.qUAO8D2S4T + cat /tmp/tmp.wcQsLfjqtG + rm /tmp/tmp.qUAO8D2S4T /tmp/tmp.wcQsLfjqtG + return 0 error: resource(s) were provided, but no name was specified + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.tABDFfePvt + cat /tmp/tmp.ZGQ8nmZ3wX + rm /tmp/tmp.tABDFfePvt /tmp/tmp.ZGQ8nmZ3wX + return 0 + kubectl_bin wait --for=delete namespace custom-users-roles-sharded-27922 ++ mktemp + local LAST_OUT=/tmp/tmp.Ehdmyh3uV7 ++ mktemp + local LAST_ERR=/tmp/tmp.XnT7mrIpTG + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete namespace custom-users-roles-sharded-27922 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Ehdmyh3uV7 + cat /tmp/tmp.XnT7mrIpTG + rm /tmp/tmp.Ehdmyh3uV7 /tmp/tmp.XnT7mrIpTG + return 0 + desc 'create namespace custom-users-roles-sharded-27922' + set +o xtrace ----------------------------------------------------------------------------------- create namespace custom-users-roles-sharded-27922 ----------------------------------------------------------------------------------- 
+ kubectl_bin create namespace custom-users-roles-sharded-27922 ++ mktemp + local LAST_OUT=/tmp/tmp.uxtMUZijp0 ++ mktemp + local LAST_ERR=/tmp/tmp.017wHfPDAS + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace custom-users-roles-sharded-27922 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.uxtMUZijp0 namespace/custom-users-roles-sharded-27922 created + cat /tmp/tmp.017wHfPDAS + rm /tmp/tmp.uxtMUZijp0 /tmp/tmp.017wHfPDAS + return 0 + set_kube_ctx custom-users-roles-sharded-27922 + local namespace=custom-users-roles-sharded-27922 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.yDeeric1bt +++ mktemp ++ local LAST_ERR=/tmp/tmp.CTQYOKCsIw ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.yDeeric1bt ++ cat /tmp/tmp.CTQYOKCsIw ++ rm /tmp/tmp.yDeeric1bt /tmp/tmp.CTQYOKCsIw ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2287-fc474e65-5-cluster7 --namespace=custom-users-roles-sharded-27922 ++ mktemp + local LAST_OUT=/tmp/tmp.sdzS1kcUvw ++ mktemp + local LAST_ERR=/tmp/tmp.Uslt8oihKb + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2287-fc474e65-5-cluster7 --namespace=custom-users-roles-sharded-27922 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.sdzS1kcUvw Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-2287-fc474e65-5-cluster7" modified. + cat /tmp/tmp.Uslt8oihKb + rm /tmp/tmp.sdzS1kcUvw /tmp/tmp.Uslt8oihKb + return 0 + mongosUri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + desc 'create secrets and start client' + set +o xtrace ----------------------------------------------------------------------------------- create secrets and start client ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/conf/client.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/conf/app-user-secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.8PnLGcqUz8 ++ mktemp + local LAST_ERR=/tmp/tmp.HcVDg2PBee + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/conf/client.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/conf/app-user-secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.8PnLGcqUz8 deployment.apps/psmdb-client created secret/some-users created secret/user-one created secret/user-two created + cat /tmp/tmp.HcVDg2PBee + rm /tmp/tmp.8PnLGcqUz8 /tmp/tmp.HcVDg2PBee + return 0 + apply_s3_storage_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f 
/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.dPJanVeRI2 ++ mktemp + local LAST_ERR=/tmp/tmp.YEjIPg8QRP + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.dPJanVeRI2 secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created secret/gcp-cs-sa-key-secret created + cat /tmp/tmp.YEjIPg8QRP + rm /tmp/tmp.dPJanVeRI2 /tmp/tmp.YEjIPg8QRP + return 0 + version_gt 1.19 ++ echo '1.32 >= 1.19' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' 0 -ne 1 ']' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/conf/container-rc.yaml + /usr/sbin/sed s/docker/runc/g + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.WFAdbXLCDu ++ mktemp + local LAST_ERR=/tmp/tmp.Wg13jw9A2r + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.WFAdbXLCDu runtimeclass.node.k8s.io/container-rc unchanged + cat /tmp/tmp.Wg13jw9A2r + rm /tmp/tmp.WFAdbXLCDu /tmp/tmp.Wg13jw9A2r + return 0 + desc 'create first PSMDB cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PSMDB cluster ----------------------------------------------------------------------------------- + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/conf/some-name-rs0.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/conf/some-name-rs0.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/conf/some-name-rs0.yml ++ mktemp + yq eval '(.spec | select(.image == null)).image = "docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0"' + yq eval '(.spec | select(has("pmm"))).pmm.image = "docker.io/percona/pmm-client:2.44.1-1"' + yq eval '(.spec | select(has("initImage"))).initImage = "docker.io/perconalab/percona-server-mongodb-operator:PR-2287-fc474e65"' + yq eval '(.spec | select(has("backup"))).backup.image = "docker.io/perconalab/percona-server-mongodb-operator:main-backup"' + local LAST_OUT=/tmp/tmp.qexMgjKJnN + /usr/sbin/sed -e s/NAME_SPACE/custom-users-roles-sharded-27922/g ++ mktemp + yq eval '.spec.upgradeOptions.apply="Never"' + local LAST_ERR=/tmp/tmp.V5tdOBKbg4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.qexMgjKJnN perconaservermongodb.psmdb.percona.com/some-name created + cat /tmp/tmp.V5tdOBKbg4 + rm /tmp/tmp.qexMgjKJnN /tmp/tmp.V5tdOBKbg4 + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + 
set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.................OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready...............OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iGjuonTrOZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.Dy4ghkaOEz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.iGjuonTrOZ ++ cat /tmp/tmp.Dy4ghkaOEz ++ rm /tmp/tmp.iGjuonTrOZ /tmp/tmp.Dy4ghkaOEz ++ return 0 + [[ '' == true ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready............OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XSkplR2AME +++ mktemp ++ local LAST_ERR=/tmp/tmp.FmgAH4TuSS ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.XSkplR2AME ++ cat /tmp/tmp.FmgAH4TuSS ++ rm /tmp/tmp.XSkplR2AME /tmp/tmp.FmgAH4TuSS ++ return 0 + [[ '' == true ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jU9NhJxEuR +++ mktemp ++ local LAST_ERR=/tmp/tmp.TJbyZwoS9T ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.jU9NhJxEuR ++ cat /tmp/tmp.TJbyZwoS9T ++ rm /tmp/tmp.jU9NhJxEuR /tmp/tmp.TJbyZwoS9T ++ return 0 + [[ '' == true ]] + sleep 10 + [[ true == true ]] + set +x Waiting for cluster readyness.......................................... 
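
The "waiting for pod/... to be ready" lines above come from a wait_pod helper that polls the pod's Ready condition and prints one dot per poll. A plausible reconstruction (hypothetical; the real helper in the e2e-tests functions file may poll a different field or interval):

    wait_pod() {
        local pod=$1
        set +o xtrace
        echo -n "waiting for pod/${pod} to be ready"
        # poll until the Ready condition reports True, one dot per attempt
        until [ "$(kubectl get pod "$pod" \
            -o 'jsonpath={.status.conditions[?(@.type=="Ready")].status}' 2>/dev/null)" = "True" ]; do
            echo -n .
            sleep 1
        done
        echo .OK
    }

The same loop now repeats for the cfg replica set and the mongos statefulset, with extra jsonpath probes on .spec.replsets[...] deciding whether arbiter, nonvoting, and hidden pods must also be waited for.
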
+ wait_for_running some-name-cfg 3 false + local name=some-name-cfg + let last_pod=2 + local check_cluster_readyness=false + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=cfg + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-cfg-0 + local pod=some-name-cfg-0 + set +o xtrace waiting for pod/some-name-cfg-0 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-cfg-1 + local pod=some-name-cfg-1 + set +o xtrace waiting for pod/some-name-cfg-1 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZrSGC9eTLp +++ mktemp ++ local LAST_ERR=/tmp/tmp.0xV9yYzQP2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ZrSGC9eTLp ++ cat /tmp/tmp.0xV9yYzQP2 ++ rm /tmp/tmp.ZrSGC9eTLp /tmp/tmp.0xV9yYzQP2 ++ return 0 + [[ '' == true ]] + wait_pod some-name-cfg-2 + local pod=some-name-cfg-2 + set +o xtrace waiting for pod/some-name-cfg-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xiSfMVfumN +++ mktemp ++ local LAST_ERR=/tmp/tmp.7RS38HDtU8 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xiSfMVfumN ++ cat /tmp/tmp.7RS38HDtU8 ++ rm /tmp/tmp.xiSfMVfumN /tmp/tmp.7RS38HDtU8 ++ return 0 + [[ '' == true ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wVOIxzXIHq +++ mktemp ++ local LAST_ERR=/tmp/tmp.upsAD8ahXS ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.wVOIxzXIHq ++ cat /tmp/tmp.upsAD8ahXS ++ rm /tmp/tmp.wVOIxzXIHq /tmp/tmp.upsAD8ahXS ++ return 0 + [[ '' == true ]] + sleep 10 + [[ false == true ]] + wait_for_running some-name-mongos 3 + local name=some-name-mongos + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=mongos + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-mongos-0 + local pod=some-name-mongos-0 + set +o xtrace waiting for pod/some-name-mongos-0 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-mongos-1 + local pod=some-name-mongos-1 + set +o xtrace waiting for pod/some-name-mongos-1 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OUdWVzJeNA +++ mktemp ++ local LAST_ERR=/tmp/tmp.3JxwKreN76 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ 
break ++ cat /tmp/tmp.OUdWVzJeNA ++ cat /tmp/tmp.3JxwKreN76 ++ rm /tmp/tmp.OUdWVzJeNA /tmp/tmp.3JxwKreN76 ++ return 0 + [[ '' == true ]] + wait_pod some-name-mongos-2 + local pod=some-name-mongos-2 + set +o xtrace waiting for pod/some-name-mongos-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UXBZ4txTG3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Uo6wjSHfSj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.UXBZ4txTG3 ++ cat /tmp/tmp.Uo6wjSHfSj ++ rm /tmp/tmp.UXBZ4txTG3 /tmp/tmp.Uo6wjSHfSj ++ return 0 + [[ '' == true ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aKahI7OcQ4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.oKahd4HiHr ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.aKahI7OcQ4 ++ cat /tmp/tmp.oKahd4HiHr ++ rm /tmp/tmp.aKahI7OcQ4 /tmp/tmp.oKahd4HiHr ++ return 0 + [[ '' == true ]] + sleep 10 + [[ true == true ]] + set +x Waiting for cluster readyness + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xPnddTeK0t +++ mktemp ++ local LAST_ERR=/tmp/tmp.kYCIanRnht ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xPnddTeK0t ++ cat /tmp/tmp.kYCIanRnht ++ rm /tmp/tmp.xPnddTeK0t /tmp/tmp.kYCIanRnht ++ return 0 + [[ ready == ready ]] + echo .OK .OK + desc 'check if service and statefulset created with expected config' + set +o xtrace ----------------------------------------------------------------------------------- check if service and statefulset created with expected config ----------------------------------------------------------------------------------- + compare_kubectl statefulset/some-name-rs0 + local resource=statefulset/some-name-rs0 + local postfix= + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/statefulset_some-name-rs0.yml + local new_result=/tmp/tmp.oSwann5ucX/statefulset_some-name-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/statefulset_some-name-rs0-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-rs0 + yq eval $'\n\t\t\tdel(.metadata.ownerReferences[].apiVersion) |\n\t\t\tdel(.metadata.managedFields) |\n\t\t\tdel(.. | select(has("creationTimestamp")).creationTimestamp) |\n\t\t\tdel(.. | select(has("namespace")).namespace) |\n\t\t\tdel(.. 
| select(has("uid")).uid) |\n\t\t\tdel(.metadata.resourceVersion) |\n\t\t\tdel(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) |\n\t\t\tdel(.metadata.selfLink) |\n\t\t\tdel(.metadata.annotations."cloud.google.com/neg") |\n\t\t\tdel(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") |\n\t\t\tdel(.. | select(has("image")).image) |\n\t\t\tdel(.. | select(has("clusterIP")).clusterIP) |\n\t\t\tdel(.. | select(has("clusterIPs")).clusterIPs) |\n\t\t\tdel(.. | select(has("dataSource")).dataSource) |\n\t\t\tdel(.. | select(has("procMount")).procMount) |\n\t\t\tdel(.. | select(has("storageClassName")).storageClassName) |\n\t\t\tdel(.. | select(has("finalizers")).finalizers) |\n\t\t\tdel(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") |\n\t\t\tdel(.. | select(has("volumeName")).volumeName) |\n\t\t\tdel(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") |\n\t\t\tdel(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") |\n\t\t\tdel(.spec.volumeMode) |\n\t\t\tdel(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") |\n\t\t\tdel(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") |\n\t\t\tdel(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") |\n\t\t\tdel(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") |\n\t\t\tdel(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") |\n\t\t\tdel(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) |\n\t\t\tdel(.. | select(has("healthCheckNodePort")).healthCheckNodePort) |\n\t\t\tdel(.. | select(has("nodePort")).nodePort) |\n\t\t\tdel(.status) |\n\t\t\t(.. | select(tag == "!!str")) |= sub("custom-users-roles-sharded-27922", "NAME_SPACE") |\n\t\t\tdel(.spec.volumeClaimTemplates[].apiVersion) |\n\t\t\tdel(.spec.volumeClaimTemplates[].kind) |\n\t\t\tdel(.spec.ipFamilies) |\n\t\t\tdel(.spec.ipFamilyPolicy) |\n\t\t\t(.. | select(. == "extensions/v1beta1")) = "apps/v1" |\n\t\t\t(.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.Zq4Qks0ABh ++ mktemp + local LAST_ERR=/tmp/tmp.G2LgNyINvh + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/some-name-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Zq4Qks0ABh + cat /tmp/tmp.G2LgNyINvh + rm /tmp/tmp.Zq4Qks0ABh /tmp/tmp.G2LgNyINvh + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.oSwann5ucX/statefulset_some-name-rs0.yml + version_gt 1.22 ++ echo '1.32 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.oSwann5ucX/statefulset_some-name-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.oSwann5ucX/statefulset_some-name-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/statefulset_some-name-rs0.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/statefulset_some-name-rs0.yml /tmp/tmp.oSwann5ucX/statefulset_some-name-rs0.yml + log 'compare_kubectl: statefulset/some-name-rs0 OK' + set +o xtrace [2026-04-09T11:13:37+0000] compare_kubectl: statefulset/some-name-rs0 OK + compare_kubectl statefulset/some-name-cfg + local resource=statefulset/some-name-cfg + local postfix= + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/statefulset_some-name-cfg.yml + local new_result=/tmp/tmp.oSwann5ucX/statefulset_some-name-cfg.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/statefulset_some-name-cfg-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-cfg + yq eval $'\n\t\t\tdel(.metadata.ownerReferences[].apiVersion) |\n\t\t\tdel(.metadata.managedFields) |\n\t\t\tdel(.. | select(has("creationTimestamp")).creationTimestamp) |\n\t\t\tdel(.. | select(has("namespace")).namespace) |\n\t\t\tdel(.. | select(has("uid")).uid) |\n\t\t\tdel(.metadata.resourceVersion) |\n\t\t\tdel(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) |\n\t\t\tdel(.metadata.selfLink) |\n\t\t\tdel(.metadata.annotations."cloud.google.com/neg") |\n\t\t\tdel(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") |\n\t\t\tdel(.. | select(has("image")).image) |\n\t\t\tdel(.. | select(has("clusterIP")).clusterIP) |\n\t\t\tdel(.. | select(has("clusterIPs")).clusterIPs) |\n\t\t\tdel(.. | select(has("dataSource")).dataSource) |\n\t\t\tdel(.. | select(has("procMount")).procMount) |\n\t\t\tdel(.. | select(has("storageClassName")).storageClassName) |\n\t\t\tdel(.. | select(has("finalizers")).finalizers) |\n\t\t\tdel(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") |\n\t\t\tdel(.. | select(has("volumeName")).volumeName) |\n\t\t\tdel(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") |\n\t\t\tdel(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") |\n\t\t\tdel(.spec.volumeMode) |\n\t\t\tdel(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") |\n\t\t\tdel(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") |\n\t\t\tdel(.. 
| select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") |\n\t\t\tdel(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") |\n\t\t\tdel(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") |\n\t\t\tdel(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) |\n\t\t\tdel(.. | select(has("healthCheckNodePort")).healthCheckNodePort) |\n\t\t\tdel(.. | select(has("nodePort")).nodePort) |\n\t\t\tdel(.status) |\n\t\t\t(.. | select(tag == "!!str")) |= sub("custom-users-roles-sharded-27922", "NAME_SPACE") |\n\t\t\tdel(.spec.volumeClaimTemplates[].apiVersion) |\n\t\t\tdel(.spec.volumeClaimTemplates[].kind) |\n\t\t\tdel(.spec.ipFamilies) |\n\t\t\tdel(.spec.ipFamilyPolicy) |\n\t\t\t(.. | select(. == "extensions/v1beta1")) = "apps/v1" |\n\t\t\t(.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.AoelmfmDXl ++ mktemp + local LAST_ERR=/tmp/tmp.xiK0z8Vsli + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/some-name-cfg + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.AoelmfmDXl + cat /tmp/tmp.xiK0z8Vsli + rm /tmp/tmp.AoelmfmDXl /tmp/tmp.xiK0z8Vsli + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.oSwann5ucX/statefulset_some-name-cfg.yml + version_gt 1.22 ++ echo '1.32 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.oSwann5ucX/statefulset_some-name-cfg.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.oSwann5ucX/statefulset_some-name-cfg.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/statefulset_some-name-cfg.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/statefulset_some-name-cfg.yml /tmp/tmp.oSwann5ucX/statefulset_some-name-cfg.yml + log 'compare_kubectl: statefulset/some-name-cfg OK' + set +o xtrace [2026-04-09T11:13:38+0000] compare_kubectl: statefulset/some-name-cfg OK + compare_kubectl statefulset/some-name-mongos '' + local resource=statefulset/some-name-mongos + local postfix= + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/statefulset_some-name-mongos.yml + local new_result=/tmp/tmp.oSwann5ucX/statefulset_some-name-mongos.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/statefulset_some-name-mongos-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-mongos ++ mktemp + yq eval $'\n\t\t\tdel(.metadata.ownerReferences[].apiVersion) |\n\t\t\tdel(.metadata.managedFields) |\n\t\t\tdel(.. | select(has("creationTimestamp")).creationTimestamp) |\n\t\t\tdel(.. | select(has("namespace")).namespace) |\n\t\t\tdel(.. | select(has("uid")).uid) |\n\t\t\tdel(.metadata.resourceVersion) |\n\t\t\tdel(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) |\n\t\t\tdel(.metadata.selfLink) |\n\t\t\tdel(.metadata.annotations."cloud.google.com/neg") |\n\t\t\tdel(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") |\n\t\t\tdel(.. | select(has("image")).image) |\n\t\t\tdel(.. | select(has("clusterIP")).clusterIP) |\n\t\t\tdel(.. | select(has("clusterIPs")).clusterIPs) |\n\t\t\tdel(.. 
| select(has("dataSource")).dataSource) |\n\t\t\tdel(.. | select(has("procMount")).procMount) |\n\t\t\tdel(.. | select(has("storageClassName")).storageClassName) |\n\t\t\tdel(.. | select(has("finalizers")).finalizers) |\n\t\t\tdel(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") |\n\t\t\tdel(.. | select(has("volumeName")).volumeName) |\n\t\t\tdel(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") |\n\t\t\tdel(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") |\n\t\t\tdel(.spec.volumeMode) |\n\t\t\tdel(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") |\n\t\t\tdel(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") |\n\t\t\tdel(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") |\n\t\t\tdel(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") |\n\t\t\tdel(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") |\n\t\t\tdel(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) |\n\t\t\tdel(.. | select(has("healthCheckNodePort")).healthCheckNodePort) |\n\t\t\tdel(.. | select(has("nodePort")).nodePort) |\n\t\t\tdel(.status) |\n\t\t\t(.. | select(tag == "!!str")) |= sub("custom-users-roles-sharded-27922", "NAME_SPACE") |\n\t\t\tdel(.spec.volumeClaimTemplates[].apiVersion) |\n\t\t\tdel(.spec.volumeClaimTemplates[].kind) |\n\t\t\tdel(.spec.ipFamilies) |\n\t\t\tdel(.spec.ipFamilyPolicy) |\n\t\t\t(.. | select(. == "extensions/v1beta1")) = "apps/v1" |\n\t\t\t(.. | select(. == "batch/v1beta1")) = "batch/v1" ' - + local LAST_OUT=/tmp/tmp.wROcgeCd6T ++ mktemp + local LAST_ERR=/tmp/tmp.9EvK8k8aOK + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/some-name-mongos + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.wROcgeCd6T + cat /tmp/tmp.9EvK8k8aOK + rm /tmp/tmp.wROcgeCd6T /tmp/tmp.9EvK8k8aOK + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.oSwann5ucX/statefulset_some-name-mongos.yml + version_gt 1.22 ++ echo '1.32 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.oSwann5ucX/statefulset_some-name-mongos.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.oSwann5ucX/statefulset_some-name-mongos.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/statefulset_some-name-mongos.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/statefulset_some-name-mongos.yml /tmp/tmp.oSwann5ucX/statefulset_some-name-mongos.yml + log 'compare_kubectl: statefulset/some-name-mongos OK' + set +o xtrace [2026-04-09T11:13:39+0000] compare_kubectl: statefulset/some-name-mongos OK + desc 'check user created on cluster creation' + set +o xtrace ----------------------------------------------------------------------------------- check user created on cluster creation ----------------------------------------------------------------------------------- + userOne=user-one ++ getSecretData user-one userOnePassKey ++ local secretName=user-one ++ local dataKey=userOnePassKey +++ kubectl get secrets/user-one '--template={{.data.userOnePassKey}}' +++ base64 -d ++ local 
data=clusterMonitor ++ echo clusterMonitor + userOnePass=clusterMonitor ++ get_user_cmd '"user-one"' ++ local 'user="user-one"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-one");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-one");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare admin $'(function() {\n\tvar user = db.getUser("user-one");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 user-one + local database=admin + local $'command=(function() {\n\tvar user = db.getUser("user-one");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local target=user-one + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar user = db.getUser("user-one");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 mongodb + local $'command=use admin\\n (function() {\n\tvar user = db.getUser("user-one");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9HZVrl4tcx +++ mktemp ++ local LAST_ERR=/tmp/tmp.q3WEZPvUOY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.9HZVrl4tcx ++ cat /tmp/tmp.q3WEZPvUOY ++ rm /tmp/tmp.9HZVrl4tcx /tmp/tmp.q3WEZPvUOY ++ return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-one");\n\tvar roles = user.roles;\n\troles.sort((a, b) 
=> {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.47WnYhhBIT ++ mktemp + local LAST_ERR=/tmp/tmp.v3DeI10asZ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-one");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.47WnYhhBIT + cat /tmp/tmp.v3DeI10asZ + rm /tmp/tmp.47WnYhhBIT /tmp/tmp.v3DeI10asZ + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/user-one.json /tmp/tmp.oSwann5ucX/user-one + check_auth user-one:clusterMonitor@some-name-mongos.custom-users-roles-sharded-27922 + local uri=user-one:clusterMonitor@some-name-mongos.custom-users-roles-sharded-27922 ++ run_mongos 'db.runCommand({ ping: 1 }).ok' user-one:clusterMonitor@some-name-mongos.custom-users-roles-sharded-27922 '' '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=user-one:clusterMonitor@some-name-mongos.custom-users-roles-sharded-27922 ++ local driver=mongodb ++ grep -E -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ local port=27017 ++ local mongo_bin=mongo +++ echo .svc.cluster.local +++ awk -F: '{print $2}' ++ suffix_port= ++ [[ -z '' ]] ++ suffix=.svc.cluster.local:27017 +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.5GPyoGnDha ++++ mktemp +++ local LAST_ERR=/tmp/tmp.34SaqrbvA9 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.5GPyoGnDha +++ cat /tmp/tmp.34SaqrbvA9 +++ rm /tmp/tmp.5GPyoGnDha /tmp/tmp.34SaqrbvA9 +++ return 0 ++ local client_container=psmdb-client-bb8b97679-xkjrq ++ kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-one:clusterMonitor@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CPzFEGVpxe +++ mktemp ++ local LAST_ERR=/tmp/tmp.fZYxzkNmZk ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-one:clusterMonitor@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.CPzFEGVpxe ++ cat /tmp/tmp.fZYxzkNmZk ++ rm /tmp/tmp.CPzFEGVpxe 
/tmp/tmp.fZYxzkNmZk ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + generatedUserSecret=some-name-custom-user-secret ++ kubectl_bin get secret some-name-custom-user-secret -o 'jsonpath={.data.user-gen}' ++ base64 -d +++ mktemp ++ local LAST_OUT=/tmp/tmp.HEk10k1Qu8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.qnQye9cmvA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get secret some-name-custom-user-secret -o 'jsonpath={.data.user-gen}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.HEk10k1Qu8 ++ cat /tmp/tmp.qnQye9cmvA ++ rm /tmp/tmp.HEk10k1Qu8 /tmp/tmp.qnQye9cmvA ++ return 0 + generatedPass=4n68EIdwh0b2n75nL6 ++ get_user_cmd '"user-gen"' ++ local 'user="user-gen"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-gen");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-gen");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare admin $'(function() {\n\tvar user = db.getUser("user-gen");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 user-gen + local database=admin + local $'command=(function() {\n\tvar user = db.getUser("user-gen");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local target=user-gen + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar user = db.getUser("user-gen");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 mongodb + local $'command=use admin\\n (function() {\n\tvar user = db.getUser("user-gen");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + sed '/"userId"/d' + 
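# NOTE: user-gen has no passwordSecretRef in the CR, so the operator generates a
# password and stores it in the some-name-custom-user-secret Secret; the trace
# reads it back and then proves it works with a ping through mongos. A condensed
# sketch, assuming $client_pod holds the psmdb-client pod name looked up earlier
# in the trace:
pass=$(kubectl get secret some-name-custom-user-secret \
    -o 'jsonpath={.data.user-gen}' | base64 -d)
ok=$(kubectl exec "$client_pod" -- bash -c "printf 'db.runCommand({ ping: 1 }).ok\n' \
    | mongo mongodb://user-gen:${pass}@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin --quiet")
[ "$ok" = 1 ]  # the test treats anything but 1 as a failed login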
suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CPdVHnOvxi +++ mktemp ++ local LAST_ERR=/tmp/tmp.VCoZ0Fej1H ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.CPdVHnOvxi ++ cat /tmp/tmp.VCoZ0Fej1H ++ rm /tmp/tmp.CPdVHnOvxi /tmp/tmp.VCoZ0Fej1H ++ return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-gen");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.ixSXcC8Gfn ++ mktemp + local LAST_ERR=/tmp/tmp.PwP4jyutto + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-gen");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ixSXcC8Gfn + cat /tmp/tmp.PwP4jyutto + rm /tmp/tmp.ixSXcC8Gfn /tmp/tmp.PwP4jyutto + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/user-gen.json /tmp/tmp.oSwann5ucX/user-gen + check_auth user-gen:4n68EIdwh0b2n75nL6@some-name-mongos.custom-users-roles-sharded-27922 + local uri=user-gen:4n68EIdwh0b2n75nL6@some-name-mongos.custom-users-roles-sharded-27922 ++ run_mongos 'db.runCommand({ ping: 1 }).ok' user-gen:4n68EIdwh0b2n75nL6@some-name-mongos.custom-users-roles-sharded-27922 '' '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=user-gen:4n68EIdwh0b2n75nL6@some-name-mongos.custom-users-roles-sharded-27922 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ local port=27017 ++ local mongo_bin=mongo ++ grep -E -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ echo .svc.cluster.local +++ awk -F: '{print $2}' ++ suffix_port= ++ [[ -z '' ]] ++ suffix=.svc.cluster.local:27017 +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.j3KAVaVSkd ++++ mktemp +++ local LAST_ERR=/tmp/tmp.XuM4xN7dMD +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.j3KAVaVSkd +++ cat /tmp/tmp.XuM4xN7dMD +++ rm /tmp/tmp.j3KAVaVSkd /tmp/tmp.XuM4xN7dMD +++ return 0 ++ 
local client_container=psmdb-client-bb8b97679-xkjrq ++ kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-gen:4n68EIdwh0b2n75nL6@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.INlA9BG8QG +++ mktemp ++ local LAST_ERR=/tmp/tmp.8qX4gfaier ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-gen:4n68EIdwh0b2n75nL6@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.INlA9BG8QG ++ cat /tmp/tmp.8qX4gfaier ++ rm /tmp/tmp.INlA9BG8QG /tmp/tmp.8qX4gfaier ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' ++ get_user_cmd '"user-external"' ++ local 'user="user-external"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-external");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-external");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare '$external' $'(function() {\n\tvar user = db.getUser("user-external");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 user-external + local 'database=$external' + local $'command=(function() {\n\tvar user = db.getUser("user-external");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local target=user-external + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use $external\\n (function() {\n\tvar user = db.getUser("user-external");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 mongodb + local $'command=use $external\\n (function() {\n\tvar user = db.getUser("user-external");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; 
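# NOTE: user-external is defined in the $external database, which MongoDB
# reserves for externally authenticated users, so there is no password to
# exercise here, only the role document to compare. The script single-quotes the
# command so the shell never expands the literal $external. An equivalent manual
# lookup, run from inside the psmdb-client pod:
printf 'use $external\n db.getUser("user-external")\n' \
    | mongo 'mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin'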
s/-[0-9]+.svc/-xxxesvc/' + sed '/"userId"/d' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9vzp1c6l09 +++ mktemp ++ local LAST_ERR=/tmp/tmp.blSPqXBybA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.9vzp1c6l09 ++ cat /tmp/tmp.blSPqXBybA ++ rm /tmp/tmp.9vzp1c6l09 /tmp/tmp.blSPqXBybA ++ return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use $external\\n (function() {\n\tvar user = db.getUser("user-external");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.tI2o8i4z0F ++ mktemp + local LAST_ERR=/tmp/tmp.9UExCVYEDT + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use $external\\n (function() {\n\tvar user = db.getUser("user-external");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.tI2o8i4z0F + cat /tmp/tmp.9UExCVYEDT + rm /tmp/tmp.tI2o8i4z0F /tmp/tmp.9UExCVYEDT + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/user-external.json /tmp/tmp.oSwann5ucX/user-external + desc 'delete initial user from CR and create a new one' + set +o xtrace ----------------------------------------------------------------------------------- delete initial user from CR and create a new one ----------------------------------------------------------------------------------- + kubectl_bin patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"users":[\n\t\t\t{\n\t\t\t\t"name":"user-two",\n\t\t\t\t"db":"admin",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-two",\n\t\t\t\t\t"key": "userTwoPassKey"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{"db":"admin","name":"userAdminAnyDatabase"}, \n\t\t\t\t\t{"db":"admin","name":"clusterAdmin"}\n\t\t\t\t]\n\t\t\t}\n\t\t]}\n\t}' ++ mktemp + local LAST_OUT=/tmp/tmp.QJvle2C9a4 ++ mktemp + local LAST_ERR=/tmp/tmp.OdwTOE1c0k + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"users":[\n\t\t\t{\n\t\t\t\t"name":"user-two",\n\t\t\t\t"db":"admin",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-two",\n\t\t\t\t\t"key": 
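# NOTE: the merge patch being echoed here replaces the whole spec.users array,
# which is how a single kubectl call both removes user-one and declares
# user-two. The same patch, unescaped for readability (content identical to the
# trace):
kubectl patch psmdb some-name --type=merge --patch '{
    "spec": {"users": [{
        "name": "user-two",
        "db": "admin",
        "passwordSecretRef": {"name": "user-two", "key": "userTwoPassKey"},
        "roles": [
            {"db": "admin", "name": "userAdminAnyDatabase"},
            {"db": "admin", "name": "clusterAdmin"}
        ]
    }]}
}'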
"userTwoPassKey"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{"db":"admin","name":"userAdminAnyDatabase"}, \n\t\t\t\t\t{"db":"admin","name":"clusterAdmin"}\n\t\t\t\t]\n\t\t\t}\n\t\t]}\n\t}' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.QJvle2C9a4 perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.OdwTOE1c0k + rm /tmp/tmp.QJvle2C9a4 /tmp/tmp.OdwTOE1c0k + return 0 + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZxxxExOCi2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.NxqX1nZvYK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ZxxxExOCi2 ++ cat /tmp/tmp.NxqX1nZvYK ++ rm /tmp/tmp.ZxxxExOCi2 /tmp/tmp.NxqX1nZvYK ++ return 0 + [[ '' == true ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wai3MoQe3s +++ mktemp ++ local LAST_ERR=/tmp/tmp.N45R41f4hl ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.wai3MoQe3s ++ cat /tmp/tmp.N45R41f4hl ++ rm /tmp/tmp.wai3MoQe3s /tmp/tmp.N45R41f4hl ++ return 0 + [[ '' == true ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MZl2LZDJGA +++ mktemp ++ local LAST_ERR=/tmp/tmp.5827CrDFws ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.MZl2LZDJGA ++ cat /tmp/tmp.5827CrDFws ++ rm /tmp/tmp.MZl2LZDJGA /tmp/tmp.5827CrDFws ++ return 0 + [[ '' == true ]] + sleep 10 + [[ true == true ]] + set +x Waiting for cluster readyness ++ get_user_cmd '"user-two"' ++ local 'user="user-two"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare admin $'(function() {\n\tvar user = 
db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 user-two + local database=admin + local $'command=(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local target=user-two + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 mongodb + local $'command=use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.puWLpaiyOl +++ mktemp ++ local LAST_ERR=/tmp/tmp.vgzONjxb2t ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.puWLpaiyOl ++ cat /tmp/tmp.vgzONjxb2t ++ rm /tmp/tmp.puWLpaiyOl /tmp/tmp.vgzONjxb2t ++ return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.C5ILYOWVtg ++ mktemp + local LAST_ERR=/tmp/tmp.id97P83n93 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > 
b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.C5ILYOWVtg + cat /tmp/tmp.id97P83n93 + rm /tmp/tmp.C5ILYOWVtg /tmp/tmp.id97P83n93 + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/user-two.json /tmp/tmp.oSwann5ucX/user-two + userTwo=user-two ++ getSecretData user-two userTwoPassKey ++ local secretName=user-two ++ local dataKey=userTwoPassKey +++ kubectl get secrets/user-two '--template={{.data.userTwoPassKey}}' +++ base64 -d ++ local data=clusterMonitor ++ echo clusterMonitor + userTwoPass=clusterMonitor + check_auth user-two:clusterMonitor@some-name-mongos.custom-users-roles-sharded-27922 + local uri=user-two:clusterMonitor@some-name-mongos.custom-users-roles-sharded-27922 ++ run_mongos 'db.runCommand({ ping: 1 }).ok' user-two:clusterMonitor@some-name-mongos.custom-users-roles-sharded-27922 '' '' --quiet ++ grep -E -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=user-two:clusterMonitor@some-name-mongos.custom-users-roles-sharded-27922 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ local port=27017 ++ local mongo_bin=mongo +++ echo .svc.cluster.local +++ awk -F: '{print $2}' ++ suffix_port= ++ [[ -z '' ]] ++ suffix=.svc.cluster.local:27017 +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.yDH6ulXYb3 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.xDTHgcSguG +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.yDH6ulXYb3 +++ cat /tmp/tmp.xDTHgcSguG +++ rm /tmp/tmp.yDH6ulXYb3 /tmp/tmp.xDTHgcSguG +++ return 0 ++ local client_container=psmdb-client-bb8b97679-xkjrq ++ kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-two:clusterMonitor@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X9no41EnVR +++ mktemp ++ local LAST_ERR=/tmp/tmp.IP4LIa0Lbh ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-two:clusterMonitor@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.X9no41EnVR ++ cat /tmp/tmp.IP4LIa0Lbh ++ rm /tmp/tmp.X9no41EnVR /tmp/tmp.IP4LIa0Lbh ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_auth user-one:clusterMonitor@some-name-mongos.custom-users-roles-sharded-27922 + local 
uri=user-one:clusterMonitor@some-name-mongos.custom-users-roles-sharded-27922 ++ run_mongos 'db.runCommand({ ping: 1 }).ok' user-one:clusterMonitor@some-name-mongos.custom-users-roles-sharded-27922 '' '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ grep -E -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local uri=user-one:clusterMonitor@some-name-mongos.custom-users-roles-sharded-27922 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ local port=27017 ++ local mongo_bin=mongo +++ echo .svc.cluster.local +++ awk -F: '{print $2}' ++ suffix_port= ++ [[ -z '' ]] ++ suffix=.svc.cluster.local:27017 +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.I0Is61TVRb ++++ mktemp +++ local LAST_ERR=/tmp/tmp.d6SNKuW57K +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.I0Is61TVRb +++ cat /tmp/tmp.d6SNKuW57K +++ rm /tmp/tmp.I0Is61TVRb /tmp/tmp.d6SNKuW57K +++ return 0 ++ local client_container=psmdb-client-bb8b97679-xkjrq ++ kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-one:clusterMonitor@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WXFcYgnBVD +++ mktemp ++ local LAST_ERR=/tmp/tmp.ojgToWLA98 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-one:clusterMonitor@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.WXFcYgnBVD ++ cat /tmp/tmp.ojgToWLA98 ++ rm /tmp/tmp.WXFcYgnBVD /tmp/tmp.ojgToWLA98 ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + desc 'check password change' + set +o xtrace ----------------------------------------------------------------------------------- check password change ----------------------------------------------------------------------------------- + userTwoNewPass=new-user-two-password ++ base64 ++ echo -n new-user-two-password + patch_secret user-two userTwoPassKey bmV3LXVzZXItdHdvLXBhc3N3b3Jk + local secret=user-two + local key=userTwoPassKey + local value=bmV3LXVzZXItdHdvLXBhc3N3b3Jk + kubectl patch secret user-two '-p={"data":{"userTwoPassKey": "bmV3LXVzZXItdHdvLXBhc3N3b3Jk"}}' secret/user-two patched + sleep 20 + check_auth user-two:new-user-two-password@some-name-mongos.custom-users-roles-sharded-27922 + local uri=user-two:new-user-two-password@some-name-mongos.custom-users-roles-sharded-27922 ++ run_mongos 'db.runCommand({ ping: 1 }).ok' user-two:new-user-two-password@some-name-mongos.custom-users-roles-sharded-27922 '' '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local 
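# NOTE: password rotation is driven entirely by the Secret. The new value is
# base64-encoded, patched into the user's Secret, and after the 20s pause the
# operator has pushed it to MongoDB, so the ping with the new password below
# succeeds. The same three steps, condensed from the trace:
new_pass=$(echo -n 'new-user-two-password' | base64)  # bmV3LXVzZXItdHdvLXBhc3N3b3Jk
kubectl patch secret user-two -p="{\"data\":{\"userTwoPassKey\": \"$new_pass\"}}"
sleep 20  # leave the operator one reconcile cycle before re-authenticating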
uri=user-two:new-user-two-password@some-name-mongos.custom-users-roles-sharded-27922 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ grep -E -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local port=27017 ++ local mongo_bin=mongo +++ echo .svc.cluster.local +++ awk -F: '{print $2}' ++ suffix_port= ++ [[ -z '' ]] ++ suffix=.svc.cluster.local:27017 +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.4P7hjGJXaR ++++ mktemp +++ local LAST_ERR=/tmp/tmp.WFyVRymizE +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.4P7hjGJXaR +++ cat /tmp/tmp.WFyVRymizE +++ rm /tmp/tmp.4P7hjGJXaR /tmp/tmp.WFyVRymizE +++ return 0 ++ local client_container=psmdb-client-bb8b97679-xkjrq ++ kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-two:new-user-two-password@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VUrI9NSsYJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.pKTOWNkhkf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-two:new-user-two-password@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.VUrI9NSsYJ ++ cat /tmp/tmp.pKTOWNkhkf ++ rm /tmp/tmp.VUrI9NSsYJ /tmp/tmp.pKTOWNkhkf ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + desc 'check user roles update from CR' + set +o xtrace ----------------------------------------------------------------------------------- check user roles update from CR ----------------------------------------------------------------------------------- + kubectl_bin patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"users":[\n\t\t\t{\n\t\t\t\t"name":"user-two",\n\t\t\t\t"db":"admin",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-two",\n\t\t\t\t\t"key": "userTwoPassKey"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{"db":"admin","name":"clusterAdmin"}\n\t\t\t\t]\n\t\t\t}\n\t\t]}\n\t}' ++ mktemp + local LAST_OUT=/tmp/tmp.1DrsXJWZBe ++ mktemp + local LAST_ERR=/tmp/tmp.72WtzTgZ0I + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"users":[\n\t\t\t{\n\t\t\t\t"name":"user-two",\n\t\t\t\t"db":"admin",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-two",\n\t\t\t\t\t"key": "userTwoPassKey"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{"db":"admin","name":"clusterAdmin"}\n\t\t\t\t]\n\t\t\t}\n\t\t]}\n\t}' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.1DrsXJWZBe perconaservermongodb.psmdb.percona.com/some-name patched + cat 
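# NOTE: shrinking the roles array in the CR (clusterAdmin only) uses the same
# merge-patch mechanism as above; the follow-up compare switches to the
# user-two-update-roles.json golden file, which expects the reduced role set. A
# quick manual check of the reconciled state, run from the client pod
# (illustrative, not part of the test):
printf 'use admin\n printjson(db.getUser("user-two").roles)\n' \
    | mongo 'mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin' --quiet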
/tmp/tmp.72WtzTgZ0I + rm /tmp/tmp.1DrsXJWZBe /tmp/tmp.72WtzTgZ0I + return 0 + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.guzVxFEKyk +++ mktemp ++ local LAST_ERR=/tmp/tmp.ksBiN3Yk6K ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.guzVxFEKyk ++ cat /tmp/tmp.ksBiN3Yk6K ++ rm /tmp/tmp.guzVxFEKyk /tmp/tmp.ksBiN3Yk6K ++ return 0 + [[ '' == true ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kJvA9hE8Fi +++ mktemp ++ local LAST_ERR=/tmp/tmp.MpiOPUMu3P ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.kJvA9hE8Fi ++ cat /tmp/tmp.MpiOPUMu3P ++ rm /tmp/tmp.kJvA9hE8Fi /tmp/tmp.MpiOPUMu3P ++ return 0 + [[ '' == true ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.flD7dUDRFc +++ mktemp ++ local LAST_ERR=/tmp/tmp.cp7ya8F24j ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.flD7dUDRFc ++ cat /tmp/tmp.cp7ya8F24j ++ rm /tmp/tmp.flD7dUDRFc /tmp/tmp.cp7ya8F24j ++ return 0 + [[ '' == true ]] + sleep 10 + [[ true == true ]] + set +x Waiting for cluster readyness ++ get_user_cmd '"user-two"' ++ local 'user="user-two"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare admin $'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 user-two-update-roles + local 
database=admin + local $'command=(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local target=user-two-update-roles + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 mongodb + local $'command=use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.t18FidCOCe +++ mktemp ++ local LAST_ERR=/tmp/tmp.kzruV7xHHF ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.t18FidCOCe ++ cat /tmp/tmp.kzruV7xHHF ++ rm /tmp/tmp.t18FidCOCe /tmp/tmp.kzruV7xHHF ++ return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.9BTZfJ6UCk ++ mktemp + local LAST_ERR=/tmp/tmp.UqTyJr90fK + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat 
/tmp/tmp.9BTZfJ6UCk + cat /tmp/tmp.UqTyJr90fK + rm /tmp/tmp.9BTZfJ6UCk /tmp/tmp.UqTyJr90fK + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/user-two-update-roles.json /tmp/tmp.oSwann5ucX/user-two-update-roles + desc 'check user roles update from DB' + set +o xtrace ----------------------------------------------------------------------------------- check user roles update from DB ----------------------------------------------------------------------------------- + run_mongos 'use admin\n db.updateUser("user-two", { roles : [{ role : "userAdminAnyDatabase", db: "admin"}]})' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local 'command=use admin\n db.updateUser("user-two", { roles : [{ role : "userAdminAnyDatabase", db: "admin"}]})' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Aatv8cZbXQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.MuVbjgg8Pt ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Aatv8cZbXQ ++ cat /tmp/tmp.MuVbjgg8Pt ++ rm /tmp/tmp.Aatv8cZbXQ /tmp/tmp.MuVbjgg8Pt ++ return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c 'printf '\''use admin\n db.updateUser("user-two", { roles : [{ role : "userAdminAnyDatabase", db: "admin"}]})\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.Uz367yj3mS ++ mktemp + local LAST_ERR=/tmp/tmp.PG33Wkp9WD + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c 'printf '\''use admin\n db.updateUser("user-two", { roles : [{ role : "userAdminAnyDatabase", db: "admin"}]})\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Uz367yj3mS Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("9efc36f7-fd1c-4b7b-ae9e-42945bfa984e") } Percona Server for MongoDB server version: v8.0.20-8 WARNING: shell and server versions do not match switched to db admin bye + cat /tmp/tmp.PG33Wkp9WD + rm /tmp/tmp.Uz367yj3mS /tmp/tmp.PG33Wkp9WD + return 0 + sleep 15 ++ get_user_cmd '"user-two"' ++ local 'user="user-two"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => 
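# NOTE: this step manufactures drift on the database side, writing
# userAdminAnyDatabase directly with db.updateUser(), then sleeps 15s and diffs
# against the same user-two-update-roles.json file. In other words, the
# expected outcome is that the operator reverts the manual change back to the
# CR-declared clusterAdmin role. The drift injection, verbatim from the trace:
printf 'use admin\n db.updateUser("user-two", { roles : [{ role : "userAdminAnyDatabase", db: "admin"}]})\n' \
    | mongo 'mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin'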
{\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare admin $'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 user-two-update-roles + local database=admin + local $'command=(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local target=user-two-update-roles + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 mongodb + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local $'command=use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + sed '/"userId"/d' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZqqtB5i7NI +++ mktemp ++ local LAST_ERR=/tmp/tmp.xPSccOeagl ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ZqqtB5i7NI ++ cat /tmp/tmp.xPSccOeagl ++ rm /tmp/tmp.ZqqtB5i7NI /tmp/tmp.xPSccOeagl ++ return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.NrJEdWRCN5 ++ mktemp + local LAST_ERR=/tmp/tmp.PLUVp7p1io + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec 
psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.NrJEdWRCN5 + cat /tmp/tmp.PLUVp7p1io + rm /tmp/tmp.NrJEdWRCN5 /tmp/tmp.PLUVp7p1io + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/user-two-update-roles.json /tmp/tmp.oSwann5ucX/user-two-update-roles + desc 'check user recreated after deleted from DB' + set +o xtrace ----------------------------------------------------------------------------------- check user recreated after deleted from DB ----------------------------------------------------------------------------------- + run_mongos 'use admin\n db.dropUser("user-two")' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local 'command=use admin\n db.dropUser("user-two")' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Qa08mbcGjA +++ mktemp ++ local LAST_ERR=/tmp/tmp.ClizlhU1bz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Qa08mbcGjA ++ cat /tmp/tmp.ClizlhU1bz ++ rm /tmp/tmp.Qa08mbcGjA /tmp/tmp.ClizlhU1bz ++ return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c 'printf '\''use admin\n db.dropUser("user-two")\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.guVwwRJf5A ++ mktemp + local LAST_ERR=/tmp/tmp.Ci7AKwcFll + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c 'printf '\''use admin\n db.dropUser("user-two")\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.guVwwRJf5A Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("a3448944-62c9-4668-b02f-6e6c67bec761") } Percona Server for MongoDB server version: v8.0.20-8 WARNING: shell and server versions do not match switched to db admin true bye + cat /tmp/tmp.Ci7AKwcFll + rm /tmp/tmp.guVwwRJf5A /tmp/tmp.Ci7AKwcFll + return 0 + sleep 15 ++ get_user_cmd '"user-two"' ++ local 'user="user-two"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = 
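# NOTE: deletion is exercised the same way: the user is dropped straight from
# the admin database (the shell prints "true" above), and after the 15s pause
# the compare expects the operator to have recreated user-two exactly as the CR
# declares it, hence the same golden file once more. The two relevant steps:
printf 'use admin\n db.dropUser("user-two")\n' \
    | mongo 'mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin'
sleep 15  # one reconcile cycle before asserting the user is back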
user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare admin $'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 user-two-update-roles + local database=admin + local $'command=(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local target=user-two-update-roles + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 mongodb + local $'command=use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wepCnd0r4v +++ mktemp ++ local LAST_ERR=/tmp/tmp.W7877yq0NU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.wepCnd0r4v ++ cat /tmp/tmp.W7877yq0NU ++ rm /tmp/tmp.wepCnd0r4v /tmp/tmp.W7877yq0NU ++ return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo 
mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.LhabqGQ5eo ++ mktemp + local LAST_ERR=/tmp/tmp.T7MfUWvt3G + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LhabqGQ5eo + cat /tmp/tmp.T7MfUWvt3G + rm /tmp/tmp.LhabqGQ5eo /tmp/tmp.T7MfUWvt3G + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/user-two-update-roles.json /tmp/tmp.oSwann5ucX/user-two-update-roles + desc 'check new user created after updated user name via CR' + set +o xtrace ----------------------------------------------------------------------------------- check new user created after updated user name via CR ----------------------------------------------------------------------------------- + kubectl_bin patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"users":[\n\t\t\t{\n\t\t\t\t"name":"user-three",\n\t\t\t\t"db":"admin",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-two",\n\t\t\t\t\t"key": "userTwoPassKey"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{"db":"admin","name":"clusterAdmin"}\n\t\t\t\t]\n\t\t\t}\n\t\t]}\n\t}' ++ mktemp + local LAST_OUT=/tmp/tmp.sVRZeYVclf ++ mktemp + local LAST_ERR=/tmp/tmp.SE2IQKQRhl + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"users":[\n\t\t\t{\n\t\t\t\t"name":"user-three",\n\t\t\t\t"db":"admin",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-two",\n\t\t\t\t\t"key": "userTwoPassKey"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{"db":"admin","name":"clusterAdmin"}\n\t\t\t\t]\n\t\t\t}\n\t\t]}\n\t}' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.sVRZeYVclf perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.SE2IQKQRhl + rm /tmp/tmp.sVRZeYVclf /tmp/tmp.SE2IQKQRhl + return 0 + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2UU2yIk0dr +++ mktemp ++ local LAST_ERR=/tmp/tmp.3q8GugFEaV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break 
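# NOTE: renaming the CR entry (user-two becomes user-three, reusing the same
# password Secret and key) is handled as the creation of a new user: below,
# user-three is verified against user-three-admin-db.json and user-two is then
# re-checked against user-two-update-roles.json, suggesting the old user is
# left in place rather than deleted. The rename patch, unescaped (content
# identical to the trace):
kubectl patch psmdb some-name --type=merge --patch '{
    "spec": {"users": [{
        "name": "user-three",
        "db": "admin",
        "passwordSecretRef": {"name": "user-two", "key": "userTwoPassKey"},
        "roles": [{"db": "admin", "name": "clusterAdmin"}]
    }]}
}'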
++ cat /tmp/tmp.2UU2yIk0dr ++ cat /tmp/tmp.3q8GugFEaV ++ rm /tmp/tmp.2UU2yIk0dr /tmp/tmp.3q8GugFEaV ++ return 0 + [[ '' == true ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9hzscdSQAq +++ mktemp ++ local LAST_ERR=/tmp/tmp.3K8uRyO0Kf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.9hzscdSQAq ++ cat /tmp/tmp.3K8uRyO0Kf ++ rm /tmp/tmp.9hzscdSQAq /tmp/tmp.3K8uRyO0Kf ++ return 0 + [[ '' == true ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ix5LHEFirF +++ mktemp ++ local LAST_ERR=/tmp/tmp.XBuuRq72J2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Ix5LHEFirF ++ cat /tmp/tmp.XBuuRq72J2 ++ rm /tmp/tmp.Ix5LHEFirF /tmp/tmp.XBuuRq72J2 ++ return 0 + [[ '' == true ]] + sleep 10 + [[ true == true ]] + set +x Waiting for cluster readyness ++ get_user_cmd '"user-three"' ++ local 'user="user-three"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare admin $'(function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 user-three-admin-db + local database=admin + local $'command=(function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local target=user-three-admin-db + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 mongodb + local $'command=use admin\\n (function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = 
roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.f2Zm4nb3Ta +++ mktemp ++ local LAST_ERR=/tmp/tmp.YnHaGGce8J ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.f2Zm4nb3Ta ++ cat /tmp/tmp.YnHaGGce8J ++ rm /tmp/tmp.f2Zm4nb3Ta /tmp/tmp.YnHaGGce8J ++ return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.RpM8GdpVox ++ mktemp + local LAST_ERR=/tmp/tmp.RW3OkDsCuF + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.RpM8GdpVox + cat /tmp/tmp.RW3OkDsCuF + rm /tmp/tmp.RpM8GdpVox /tmp/tmp.RW3OkDsCuF + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/user-three-admin-db.json /tmp/tmp.oSwann5ucX/user-three-admin-db ++ get_user_cmd '"user-two"' ++ local 'user="user-two"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare admin $'(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = 
roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 user-two-update-roles + local database=admin + local $'command=(function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local target=user-two-update-roles + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 mongodb + local $'command=use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.S400dhYZRu +++ mktemp ++ local LAST_ERR=/tmp/tmp.1Pu3DJSEbA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.S400dhYZRu ++ cat /tmp/tmp.1Pu3DJSEbA ++ rm /tmp/tmp.S400dhYZRu /tmp/tmp.1Pu3DJSEbA ++ return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.xIpn5Zhmyl ++ mktemp + local LAST_ERR=/tmp/tmp.bbdKkrjwvb + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-two");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo 
mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.xIpn5Zhmyl + cat /tmp/tmp.bbdKkrjwvb + rm /tmp/tmp.xIpn5Zhmyl /tmp/tmp.bbdKkrjwvb + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/user-two-update-roles.json /tmp/tmp.oSwann5ucX/user-two-update-roles + check_auth user-two:new-user-two-password@some-name-mongos.custom-users-roles-sharded-27922 + local uri=user-two:new-user-two-password@some-name-mongos.custom-users-roles-sharded-27922 ++ run_mongos 'db.runCommand({ ping: 1 }).ok' user-two:new-user-two-password@some-name-mongos.custom-users-roles-sharded-27922 '' '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=user-two:new-user-two-password@some-name-mongos.custom-users-roles-sharded-27922 ++ grep -E -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ local port=27017 ++ local mongo_bin=mongo +++ echo .svc.cluster.local +++ awk -F: '{print $2}' ++ suffix_port= ++ [[ -z '' ]] ++ suffix=.svc.cluster.local:27017 +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.why3W522kg ++++ mktemp +++ local LAST_ERR=/tmp/tmp.QBCDCfk7ck +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.why3W522kg +++ cat /tmp/tmp.QBCDCfk7ck +++ rm /tmp/tmp.why3W522kg /tmp/tmp.QBCDCfk7ck +++ return 0 ++ local client_container=psmdb-client-bb8b97679-xkjrq ++ kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-two:new-user-two-password@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6O7XIsNHfz +++ mktemp ++ local LAST_ERR=/tmp/tmp.YvPF91wCua ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-two:new-user-two-password@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.6O7XIsNHfz ++ cat /tmp/tmp.YvPF91wCua ++ rm /tmp/tmp.6O7XIsNHfz /tmp/tmp.YvPF91wCua ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_auth user-three:new-user-two-password@some-name-mongos.custom-users-roles-sharded-27922 + local uri=user-three:new-user-two-password@some-name-mongos.custom-users-roles-sharded-27922 ++ run_mongos 'db.runCommand({ ping: 1 }).ok' user-three:new-user-two-password@some-name-mongos.custom-users-roles-sharded-27922 '' '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local 
uri=user-three:new-user-two-password@some-name-mongos.custom-users-roles-sharded-27922 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ local port=27017 ++ local mongo_bin=mongo ++ grep -E -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ echo .svc.cluster.local +++ awk -F: '{print $2}' ++ suffix_port= ++ [[ -z '' ]] ++ suffix=.svc.cluster.local:27017 +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.YCNd4Rheof ++++ mktemp +++ local LAST_ERR=/tmp/tmp.YMTb9GhjKi +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.YCNd4Rheof +++ cat /tmp/tmp.YMTb9GhjKi +++ rm /tmp/tmp.YCNd4Rheof /tmp/tmp.YMTb9GhjKi +++ return 0 ++ local client_container=psmdb-client-bb8b97679-xkjrq ++ kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-three:new-user-two-password@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.F2924HxHyj +++ mktemp ++ local LAST_ERR=/tmp/tmp.nleiBDVZY4 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://user-three:new-user-two-password@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.F2924HxHyj ++ cat /tmp/tmp.nleiBDVZY4 ++ rm /tmp/tmp.F2924HxHyj /tmp/tmp.nleiBDVZY4 ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + desc 'check new user created after updated user db via CR' + set +o xtrace ----------------------------------------------------------------------------------- check new user created after updated user db via CR ----------------------------------------------------------------------------------- + kubectl_bin patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"users":[\n\t\t\t{\n\t\t\t\t"name":"user-three",\n\t\t\t\t"db":"newDb",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-two",\n\t\t\t\t\t"key": "userTwoPassKey"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{"db":"admin","name":"clusterAdmin"}\n\t\t\t\t]\n\t\t\t}\n\t\t]}\n\t}' ++ mktemp + local LAST_OUT=/tmp/tmp.SErS7YpGl9 ++ mktemp + local LAST_ERR=/tmp/tmp.iiycuwXUuC + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"users":[\n\t\t\t{\n\t\t\t\t"name":"user-three",\n\t\t\t\t"db":"newDb",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-two",\n\t\t\t\t\t"key": "userTwoPassKey"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{"db":"admin","name":"clusterAdmin"}\n\t\t\t\t]\n\t\t\t}\n\t\t]}\n\t}' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.SErS7YpGl9 
perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.iiycuwXUuC + rm /tmp/tmp.SErS7YpGl9 /tmp/tmp.iiycuwXUuC + return 0 + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xaOB9AcPBN +++ mktemp ++ local LAST_ERR=/tmp/tmp.i8qaWeiGKi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xaOB9AcPBN ++ cat /tmp/tmp.i8qaWeiGKi ++ rm /tmp/tmp.xaOB9AcPBN /tmp/tmp.i8qaWeiGKi ++ return 0 + [[ '' == true ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vlRZsqyA1q +++ mktemp ++ local LAST_ERR=/tmp/tmp.3eMchQ2wiZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.vlRZsqyA1q ++ cat /tmp/tmp.3eMchQ2wiZ ++ rm /tmp/tmp.vlRZsqyA1q /tmp/tmp.3eMchQ2wiZ ++ return 0 + [[ '' == true ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dPZ0jb1zyF +++ mktemp ++ local LAST_ERR=/tmp/tmp.wDN7Jqf5qq ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dPZ0jb1zyF ++ cat /tmp/tmp.wDN7Jqf5qq ++ rm /tmp/tmp.dPZ0jb1zyF /tmp/tmp.wDN7Jqf5qq ++ return 0 + [[ '' == true ]] + sleep 10 + [[ true == true ]] + set +x Waiting for cluster readyness ++ get_user_cmd '"user-three"' ++ local 'user="user-three"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare newDb $'(function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' 
userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 user-three-newDb-db + local database=newDb + local $'command=(function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local target=user-three-newDb-db + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongos $'use newDb\\n (function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 mongodb + local $'command=use newDb\\n (function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + local port=27017 + local mongo_bin=mongo + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cKB0moDRqR +++ mktemp ++ local LAST_ERR=/tmp/tmp.SewIjBHukA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.cKB0moDRqR ++ cat /tmp/tmp.SewIjBHukA ++ rm /tmp/tmp.cKB0moDRqR /tmp/tmp.SewIjBHukA ++ return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use newDb\\n (function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.c7eByE1o70 ++ mktemp + local LAST_ERR=/tmp/tmp.V5uWjqPAX1 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use newDb\\n (function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo 
mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.c7eByE1o70 + cat /tmp/tmp.V5uWjqPAX1 + rm /tmp/tmp.c7eByE1o70 /tmp/tmp.V5uWjqPAX1 + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/user-three-newDb-db.json /tmp/tmp.oSwann5ucX/user-three-newDb-db ++ get_user_cmd '"user-three"' ++ local 'user="user-three"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare admin $'(function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 user-three-admin-db + local database=admin + local $'command=(function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local target=user-three-admin-db + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 mongodb + local $'command=use admin\\n (function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fAXsSVYLz6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.EEvyYFqsO0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 
'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.fAXsSVYLz6 ++ cat /tmp/tmp.EEvyYFqsO0 ++ rm /tmp/tmp.fAXsSVYLz6 /tmp/tmp.EEvyYFqsO0 ++ return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.czUqGQm9lf ++ mktemp + local LAST_ERR=/tmp/tmp.YcBIs1Nj55 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-three");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.czUqGQm9lf + cat /tmp/tmp.YcBIs1Nj55 + rm /tmp/tmp.czUqGQm9lf /tmp/tmp.YcBIs1Nj55 + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/user-three-admin-db.json /tmp/tmp.oSwann5ucX/user-three-admin-db + desc 'check new user created with default db and secret password key' + set +o xtrace ----------------------------------------------------------------------------------- check new user created with default db and secret password key ----------------------------------------------------------------------------------- + kubectl_bin patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"users":[\n\t\t\t{\n\t\t\t\t"name":"user-four",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-two"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{"db":"admin","name":"clusterAdmin"}\n\t\t\t\t]\n\t\t\t}\n\t\t]}\n\t}' ++ mktemp + local LAST_OUT=/tmp/tmp.UpSRChoKoT ++ mktemp + local LAST_ERR=/tmp/tmp.tKDnoRDgGF + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"users":[\n\t\t\t{\n\t\t\t\t"name":"user-four",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-two"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{"db":"admin","name":"clusterAdmin"}\n\t\t\t\t]\n\t\t\t}\n\t\t]}\n\t}' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.UpSRChoKoT perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.tKDnoRDgGF + rm /tmp/tmp.UpSRChoKoT /tmp/tmp.tKDnoRDgGF + return 0 + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for 
pod/some-name-rs0-1 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YvEhmtaAWH +++ mktemp ++ local LAST_ERR=/tmp/tmp.bl88JgJFG6 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.YvEhmtaAWH ++ cat /tmp/tmp.bl88JgJFG6 ++ rm /tmp/tmp.YvEhmtaAWH /tmp/tmp.bl88JgJFG6 ++ return 0 + [[ '' == true ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4hAcNfSoU8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.HGiuIMpcKg ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.4hAcNfSoU8 ++ cat /tmp/tmp.HGiuIMpcKg ++ rm /tmp/tmp.4hAcNfSoU8 /tmp/tmp.HGiuIMpcKg ++ return 0 + [[ '' == true ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.io8ZaRl68I +++ mktemp ++ local LAST_ERR=/tmp/tmp.YGQs6h7cYC ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.io8ZaRl68I ++ cat /tmp/tmp.YGQs6h7cYC ++ rm /tmp/tmp.io8ZaRl68I /tmp/tmp.YGQs6h7cYC ++ return 0 + [[ '' == true ]] + sleep 10 + [[ true == true ]] + set +x Waiting for cluster readyness ++ get_user_cmd '"user-four"' ++ local 'user="user-four"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-four");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-four");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare admin $'(function() {\n\tvar user = db.getUser("user-four");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 user-four + local database=admin + local $'command=(function() {\n\tvar user = db.getUser("user-four");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local target=user-four + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar user = db.getUser("user-four");\n\tvar roles = 
user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 mongodb + local $'command=use admin\\n (function() {\n\tvar user = db.getUser("user-four");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jdtm14b3Na +++ mktemp ++ local LAST_ERR=/tmp/tmp.Crz1yVoiE2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.jdtm14b3Na ++ cat /tmp/tmp.Crz1yVoiE2 ++ rm /tmp/tmp.jdtm14b3Na /tmp/tmp.Crz1yVoiE2 ++ return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-four");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.f0LUvhx5ad ++ mktemp + local LAST_ERR=/tmp/tmp.ZRnPhw7dI7 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar user = db.getUser("user-four");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.f0LUvhx5ad + cat /tmp/tmp.ZRnPhw7dI7 + rm /tmp/tmp.f0LUvhx5ad /tmp/tmp.ZRnPhw7dI7 + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/user-four.json /tmp/tmp.oSwann5ucX/user-four + desc 'check user role on cluster initialization' + set +o xtrace ----------------------------------------------------------------------------------- check user role on cluster initialization ----------------------------------------------------------------------------------- ++ get_role_cmd '"role-one"' ++ local 'role="role-one"' ++ 
cmd=$'(function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' ++ echo $'(function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + compare admin $'(function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 role-one + local database=admin + local $'command=(function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local target=role-one + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 mongodb + local $'command=use admin\\n (function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local driver=mongodb + local suffix=.svc.cluster.local + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local mongo_flag= + local port=27017 + local mongo_bin=mongo + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UmxrCEbQvg +++ mktemp ++ local LAST_ERR=/tmp/tmp.umbMwcooS6 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.UmxrCEbQvg ++ cat /tmp/tmp.umbMwcooS6 ++ rm /tmp/tmp.UmxrCEbQvg /tmp/tmp.umbMwcooS6 ++ 
return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.Qi2KRY1SFn ++ mktemp + local LAST_ERR=/tmp/tmp.JIJ1hjAKSE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Qi2KRY1SFn + cat /tmp/tmp.JIJ1hjAKSE + rm /tmp/tmp.Qi2KRY1SFn /tmp/tmp.JIJ1hjAKSE + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/role-one.json /tmp/tmp.oSwann5ucX/role-one + desc 'check role recreated after deleted from DB' + set +o xtrace ----------------------------------------------------------------------------------- check role recreated after deleted from DB ----------------------------------------------------------------------------------- + run_mongos 'use admin\n db.dropRole("role-one")' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local 'command=use admin\n db.dropRole("role-one")' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Bwx2jFS4wz +++ mktemp ++ local LAST_ERR=/tmp/tmp.QMUBEDeVmj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Bwx2jFS4wz ++ cat /tmp/tmp.QMUBEDeVmj ++ rm /tmp/tmp.Bwx2jFS4wz /tmp/tmp.QMUBEDeVmj ++ return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c 'printf '\''use admin\n db.dropRole("role-one")\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.dMVPKCJm7U ++ mktemp + local LAST_ERR=/tmp/tmp.LEmpJ89xaE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c 'printf '\''use admin\n db.dropRole("role-one")\n'\'' | mongo 
mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.dMVPKCJm7U
Percona Server for MongoDB shell version v4.4.29-28
connecting to: mongodb://some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb
Implicit session: session { "id" : UUID("f02ba1ce-1eda-4b79-98f6-c6c5aa25ceab") }
Percona Server for MongoDB server version: v8.0.20-8
WARNING: shell and server versions do not match
switched to db admin
true
bye
+ cat /tmp/tmp.LEmpJ89xaE + rm /tmp/tmp.dMVPKCJm7U /tmp/tmp.LEmpJ89xaE + return 0 + sleep 15 ++ get_role_cmd '"role-one"' ++ local 'role="role-one"' ++ cmd=$'(function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' ++ echo $'(function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + compare admin $'(function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 role-one + local database=admin + local $'command=(function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local target=role-one + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 mongodb + local $'command=use admin\\n (function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not 
match|Error saving history file:' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' ++ echo .svc.cluster.local + sed '/"userId"/d' ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ymb3hXigci +++ mktemp ++ local LAST_ERR=/tmp/tmp.G8DAnVbwMb ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Ymb3hXigci ++ cat /tmp/tmp.G8DAnVbwMb ++ rm /tmp/tmp.Ymb3hXigci /tmp/tmp.G8DAnVbwMb ++ return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.UKe3XpwMLc ++ mktemp + local LAST_ERR=/tmp/tmp.HVhNzyGzTh + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.UKe3XpwMLc + cat /tmp/tmp.HVhNzyGzTh + rm /tmp/tmp.UKe3XpwMLc /tmp/tmp.HVhNzyGzTh + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/role-one.json /tmp/tmp.oSwann5ucX/role-one + desc 'delete initial role from CR and create a new one' + set +o xtrace ----------------------------------------------------------------------------------- delete initial role from CR and create a new one ----------------------------------------------------------------------------------- + kubectl_bin patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"roles":[\n\t\t\t{\n\t\t\t\t"role": "role-two",\n\t\t\t\t"db": "admin",\n\t\t\t\t"privileges": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"resource": {\n\t\t\t\t\t\t\t"db": "config",\n\t\t\t\t\t\t\t"collection": ""\n\t\t\t\t\t\t},\n\t\t\t\t\t\t"actions": [\n\t\t\t\t\t\t\t"find"\n\t\t\t\t\t\t]\n\t\t\t\t\t}\n\t\t\t\t],\n\t\t\t\t"roles": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"role": "read",\n\t\t\t\t\t\t"db": "admin"\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t}\n\t\t]\n\t}}' ++ mktemp + local LAST_OUT=/tmp/tmp.D51FLKrNPY ++ mktemp + local LAST_ERR=/tmp/tmp.97S0Ta8fLl + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"roles":[\n\t\t\t{\n\t\t\t\t"role": "role-two",\n\t\t\t\t"db": "admin",\n\t\t\t\t"privileges": 
[\n\t\t\t\t\t{\n\t\t\t\t\t\t"resource": {\n\t\t\t\t\t\t\t"db": "config",\n\t\t\t\t\t\t\t"collection": ""\n\t\t\t\t\t\t},\n\t\t\t\t\t\t"actions": [\n\t\t\t\t\t\t\t"find"\n\t\t\t\t\t\t]\n\t\t\t\t\t}\n\t\t\t\t],\n\t\t\t\t"roles": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"role": "read",\n\t\t\t\t\t\t"db": "admin"\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t}\n\t\t]\n\t}}' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.D51FLKrNPY perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.97S0Ta8fLl + rm /tmp/tmp.D51FLKrNPY /tmp/tmp.97S0Ta8fLl + return 0 + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hllfpEZFcs +++ mktemp ++ local LAST_ERR=/tmp/tmp.4XMJ4BGAnE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hllfpEZFcs ++ cat /tmp/tmp.4XMJ4BGAnE ++ rm /tmp/tmp.hllfpEZFcs /tmp/tmp.4XMJ4BGAnE ++ return 0 + [[ '' == true ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IyL7wADGvX +++ mktemp ++ local LAST_ERR=/tmp/tmp.mu0hwQKdwi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.IyL7wADGvX ++ cat /tmp/tmp.mu0hwQKdwi ++ rm /tmp/tmp.IyL7wADGvX /tmp/tmp.mu0hwQKdwi ++ return 0 + [[ '' == true ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WlG7kpKHc3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.hntKDHLpyQ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.WlG7kpKHc3 ++ cat /tmp/tmp.hntKDHLpyQ ++ rm /tmp/tmp.WlG7kpKHc3 /tmp/tmp.hntKDHLpyQ ++ return 0 + [[ '' == true ]] + sleep 10 + [[ true == true ]] + set +x Waiting for cluster readyness ++ get_role_cmd '"role-one"' ++ local 'role="role-one"' ++ cmd=$'(function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' ++ echo $'(function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, 
showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + compare admin $'(function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 role-one + local database=admin + local $'command=(function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local target=role-one + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 mongodb + local $'command=use admin\\n (function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' ++ echo .svc.cluster.local + sed '/"userId"/d' ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CdvBaxijLl +++ mktemp ++ local LAST_ERR=/tmp/tmp.yEiAAaKaGq ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.CdvBaxijLl ++ cat /tmp/tmp.yEiAAaKaGq ++ rm /tmp/tmp.CdvBaxijLl /tmp/tmp.yEiAAaKaGq ++ return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 
0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.Tfy7LBKnvf ++ mktemp + local LAST_ERR=/tmp/tmp.gtQvYp69PX + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = db.getRole("role-one", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Tfy7LBKnvf + cat /tmp/tmp.gtQvYp69PX + rm /tmp/tmp.Tfy7LBKnvf /tmp/tmp.gtQvYp69PX + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/role-one.json /tmp/tmp.oSwann5ucX/role-one ++ get_role_cmd '"role-two"' ++ local 'role="role-two"' ++ cmd=$'(function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' ++ echo $'(function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + compare admin $'(function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 role-two + local database=admin + local $'command=(function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local target=role-two + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 mongodb + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/sbin/sed -re 
's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + local $'command=use admin\\n (function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + sed '/"userId"/d' + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZiWBOZi2IV +++ mktemp ++ local LAST_ERR=/tmp/tmp.V3lJjQFDCZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ZiWBOZi2IV ++ cat /tmp/tmp.V3lJjQFDCZ ++ rm /tmp/tmp.ZiWBOZi2IV /tmp/tmp.V3lJjQFDCZ ++ return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.WMVGj9IGnq ++ mktemp + local LAST_ERR=/tmp/tmp.NtkMILYpCs + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.WMVGj9IGnq + cat /tmp/tmp.NtkMILYpCs + rm /tmp/tmp.WMVGj9IGnq /tmp/tmp.NtkMILYpCs + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/role-two.json /tmp/tmp.oSwann5ucX/role-two + desc 'check role update from CR' + set +o xtrace ----------------------------------------------------------------------------------- check role update from CR ----------------------------------------------------------------------------------- + kubectl_bin patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"roles":[\n\t\t\t{\n\t\t\t\t"role": "role-two",\n\t\t\t\t"db": "admin",\n\t\t\t\t"privileges": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"resource": {\n\t\t\t\t\t\t\t"db": "config",\n\t\t\t\t\t\t\t"collection": ""\n\t\t\t\t\t\t},\n\t\t\t\t\t\t"actions": [\n\t\t\t\t\t\t\t"find"\n\t\t\t\t\t\t]\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t}\n\t\t]\n\t}}' ++ mktemp + local 
LAST_OUT=/tmp/tmp.BlWSbclMcb ++ mktemp + local LAST_ERR=/tmp/tmp.bXmgzrvdoy + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"roles":[\n\t\t\t{\n\t\t\t\t"role": "role-two",\n\t\t\t\t"db": "admin",\n\t\t\t\t"privileges": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"resource": {\n\t\t\t\t\t\t\t"db": "config",\n\t\t\t\t\t\t\t"collection": ""\n\t\t\t\t\t\t},\n\t\t\t\t\t\t"actions": [\n\t\t\t\t\t\t\t"find"\n\t\t\t\t\t\t]\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t}\n\t\t]\n\t}}' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.BlWSbclMcb perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.bXmgzrvdoy + rm /tmp/tmp.BlWSbclMcb /tmp/tmp.bXmgzrvdoy + return 0 + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5h4U5h4CG5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.uCIyGRvBGO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.5h4U5h4CG5 ++ cat /tmp/tmp.uCIyGRvBGO ++ rm /tmp/tmp.5h4U5h4CG5 /tmp/tmp.uCIyGRvBGO ++ return 0 + [[ '' == true ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5fm5fm69D0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.hYTsUhBnEI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.5fm5fm69D0 ++ cat /tmp/tmp.hYTsUhBnEI ++ rm /tmp/tmp.5fm5fm69D0 /tmp/tmp.hYTsUhBnEI ++ return 0 + [[ '' == true ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DFcqur4FVg +++ mktemp ++ local LAST_ERR=/tmp/tmp.vHM4E3BG2K ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.DFcqur4FVg ++ cat /tmp/tmp.vHM4E3BG2K ++ rm /tmp/tmp.DFcqur4FVg /tmp/tmp.vHM4E3BG2K ++ return 0 + [[ '' == true ]] + sleep 10 + [[ true == true ]] + set +x Waiting for cluster readyness ++ get_role_cmd '"role-two"' ++ local 'role="role-two"' ++ cmd=$'(function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) 
return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' ++ echo $'(function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + compare admin $'(function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 role-two-updated + local database=admin + local $'command=(function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local target=role-two-updated + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 mongodb + local $'command=use admin\\n (function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CbibWd8YPr +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZcnObSqCsP ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.CbibWd8YPr ++ cat /tmp/tmp.ZcnObSqCsP ++ rm /tmp/tmp.CbibWd8YPr /tmp/tmp.ZcnObSqCsP ++ return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = 
db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.JUgiS4Kvuj ++ mktemp + local LAST_ERR=/tmp/tmp.WACCrraAIa + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.JUgiS4Kvuj + cat /tmp/tmp.WACCrraAIa + rm /tmp/tmp.JUgiS4Kvuj /tmp/tmp.WACCrraAIa + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/role-two-updated.json /tmp/tmp.oSwann5ucX/role-two-updated + desc 'check role update from DB' + set +o xtrace ----------------------------------------------------------------------------------- check role update from DB ----------------------------------------------------------------------------------- + run_mongos 'use admin\n db.updateRole( "role-two",{privileges:[{resource: {db:"config", collection:"" }, actions: ["find", "update"]}]})' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local 'command=use admin\n db.updateRole( "role-two",{privileges:[{resource: {db:"config", collection:"" }, actions: ["find", "update"]}]})' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.35F506INSk +++ mktemp ++ local LAST_ERR=/tmp/tmp.cHTmMFkvkO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.35F506INSk ++ cat /tmp/tmp.cHTmMFkvkO ++ rm /tmp/tmp.35F506INSk /tmp/tmp.cHTmMFkvkO ++ return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c 'printf '\''use admin\n db.updateRole( "role-two",{privileges:[{resource: {db:"config", collection:"" }, actions: ["find", "update"]}]})\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.4GFxGOyQFy ++ mktemp + local LAST_ERR=/tmp/tmp.S50PvBawMc + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c 'printf '\''use admin\n 
db.updateRole( "role-two",{privileges:[{resource: {db:"config", collection:"" }, actions: ["find", "update"]}]})\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.4GFxGOyQFy Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("f59dbe42-1dee-4e01-b6b7-4c09abfa9d15") } Percona Server for MongoDB server version: v8.0.20-8 WARNING: shell and server versions do not match switched to db admin bye + cat /tmp/tmp.S50PvBawMc + rm /tmp/tmp.4GFxGOyQFy /tmp/tmp.S50PvBawMc + return 0 + sleep 15 ++ get_role_cmd '"role-two"' ++ local 'role="role-two"' ++ cmd=$'(function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' ++ echo $'(function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + compare admin $'(function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 role-two-updated + local database=admin + local $'command=(function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local target=role-two-updated + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 mongodb + local $'command=use admin\\n (function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local driver=mongodb + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for 
set|Implicit session:|versions do not match|Error saving history file:' + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.S9T0B6DDjj +++ mktemp ++ local LAST_ERR=/tmp/tmp.dMEY3h9mlS ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.S9T0B6DDjj ++ cat /tmp/tmp.dMEY3h9mlS ++ rm /tmp/tmp.S9T0B6DDjj /tmp/tmp.dMEY3h9mlS ++ return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.rlZUdF7ZD6 ++ mktemp + local LAST_ERR=/tmp/tmp.YvWOUY4SaS + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = db.getRole("role-two", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.rlZUdF7ZD6 + cat /tmp/tmp.YvWOUY4SaS + rm /tmp/tmp.rlZUdF7ZD6 /tmp/tmp.YvWOUY4SaS + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/role-two-updated.json /tmp/tmp.oSwann5ucX/role-two-updated + desc 'check new role created after updated role name via CR' + set +o xtrace ----------------------------------------------------------------------------------- check new role created after updated role name via CR ----------------------------------------------------------------------------------- + kubectl_bin patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"roles":[\n\t\t\t{\n\t\t\t\t"role": "role-three",\n\t\t\t\t"db": "admin",\n\t\t\t\t"privileges": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"resource": {\n\t\t\t\t\t\t\t"db": "config",\n\t\t\t\t\t\t\t"collection": ""\n\t\t\t\t\t\t},\n\t\t\t\t\t\t"actions": [\n\t\t\t\t\t\t\t"find"\n\t\t\t\t\t\t]\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t}\n\t\t]\n\t}}' ++ mktemp + local LAST_OUT=/tmp/tmp.1p3ClqckLf ++ mktemp + local LAST_ERR=/tmp/tmp.99uZNWxvTa + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {"roles":[\n\t\t\t{\n\t\t\t\t"role": "role-three",\n\t\t\t\t"db": 
"admin",\n\t\t\t\t"privileges": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"resource": {\n\t\t\t\t\t\t\t"db": "config",\n\t\t\t\t\t\t\t"collection": ""\n\t\t\t\t\t\t},\n\t\t\t\t\t\t"actions": [\n\t\t\t\t\t\t\t"find"\n\t\t\t\t\t\t]\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t}\n\t\t]\n\t}}' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.1p3ClqckLf perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.99uZNWxvTa + rm /tmp/tmp.1p3ClqckLf /tmp/tmp.99uZNWxvTa + return 0 + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.amqbAcqQOT +++ mktemp ++ local LAST_ERR=/tmp/tmp.GxoJYiaZoV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.amqbAcqQOT ++ cat /tmp/tmp.GxoJYiaZoV ++ rm /tmp/tmp.amqbAcqQOT /tmp/tmp.GxoJYiaZoV ++ return 0 + [[ '' == true ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PFudBcV2eS +++ mktemp ++ local LAST_ERR=/tmp/tmp.Px4nRBsSj0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.PFudBcV2eS ++ cat /tmp/tmp.Px4nRBsSj0 ++ rm /tmp/tmp.PFudBcV2eS /tmp/tmp.Px4nRBsSj0 ++ return 0 + [[ '' == true ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YtnKy3SaDw +++ mktemp ++ local LAST_ERR=/tmp/tmp.trQ6UwAJyD ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.YtnKy3SaDw ++ cat /tmp/tmp.trQ6UwAJyD ++ rm /tmp/tmp.YtnKy3SaDw /tmp/tmp.trQ6UwAJyD ++ return 0 + [[ '' == true ]] + sleep 10 + [[ true == true ]] + set +x Waiting for cluster readyness ++ get_role_cmd '"role-three"' ++ local 'role="role-three"' ++ cmd=$'(function() {\n\tvar role = db.getRole("role-three", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' ++ echo $'(function() {\n\tvar role = db.getRole("role-three", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = 
role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + compare admin $'(function() {\n\tvar role = db.getRole("role-three", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 role-three + local database=admin + local $'command=(function() {\n\tvar role = db.getRole("role-three", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local target=role-three + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use admin\\n (function() {\n\tvar role = db.getRole("role-three", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 mongodb + local $'command=use admin\\n (function() {\n\tvar role = db.getRole("role-three", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qZX4v6qCl5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.owgoRyhDj6 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.qZX4v6qCl5 ++ cat /tmp/tmp.owgoRyhDj6 ++ rm /tmp/tmp.qZX4v6qCl5 /tmp/tmp.owgoRyhDj6 ++ return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = db.getRole("role-three", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = 
roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.IBwYZgST24 ++ mktemp + local LAST_ERR=/tmp/tmp.dhMiCymvRX + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use admin\\n (function() {\n\tvar role = db.getRole("role-three", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.IBwYZgST24 + cat /tmp/tmp.dhMiCymvRX + rm /tmp/tmp.IBwYZgST24 /tmp/tmp.dhMiCymvRX + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/role-three.json /tmp/tmp.oSwann5ucX/role-three + desc 'check creating multiple roles and the users in a single CR apply' + set +o xtrace ----------------------------------------------------------------------------------- check creating multiple roles and the users in a single CR apply ----------------------------------------------------------------------------------- + kubectl_bin patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {\n\t\t\t"roles": [\n\t\t\t\t{\n\t\t\t\t"role": "role-four",\n\t\t\t\t"db": "testAdmin1",\n\t\t\t\t"privileges": [\n\t\t\t\t\t{\n\t\t\t\t\t"resource": {\n\t\t\t\t\t\t"db": "testAdmin1",\n\t\t\t\t\t\t"collection": ""\n\t\t\t\t\t},\n\t\t\t\t\t"actions": [\n\t\t\t\t\t\t"find",\n\t\t\t\t\t\t"listIndexes",\n\t\t\t\t\t\t"listCollections"\n\t\t\t\t\t]\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t"resource": {\n\t\t\t\t\t\t"db": "testAdmin1",\n\t\t\t\t\t\t"collection": "system.profile"\n\t\t\t\t\t},\n\t\t\t\t\t"actions": [\n\t\t\t\t\t\t"dbStats",\n\t\t\t\t\t\t"collStats",\n\t\t\t\t\t\t"indexStats"\n\t\t\t\t\t]\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t"resource": {\n\t\t\t\t\t\t"db": "testAdmin1",\n\t\t\t\t\t\t"collection": "system.version"\n\t\t\t\t\t},\n\t\t\t\t\t"actions": [\n\t\t\t\t\t\t"find"\n\t\t\t\t\t]\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t"role": "role-five",\n\t\t\t\t"db": "testAdmin2",\n\t\t\t\t"privileges": [\n\t\t\t\t\t{\n\t\t\t\t\t"resource": {\n\t\t\t\t\t\t"db": "testAdmin2",\n\t\t\t\t\t\t"collection": ""\n\t\t\t\t\t},\n\t\t\t\t\t"actions": [\n\t\t\t\t\t\t"find",\n\t\t\t\t\t\t"listIndexes",\n\t\t\t\t\t\t"listCollections"\n\t\t\t\t\t]\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t"resource": {\n\t\t\t\t\t\t"db": "testAdmin2",\n\t\t\t\t\t\t"collection": "system.profile"\n\t\t\t\t\t},\n\t\t\t\t\t"actions": [\n\t\t\t\t\t\t"dbStats",\n\t\t\t\t\t\t"collStats",\n\t\t\t\t\t\t"indexStats"\n\t\t\t\t\t]\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t"resource": {\n\t\t\t\t\t\t"db": "testAdmin2",\n\t\t\t\t\t\t"collection": "system.version"\n\t\t\t\t\t},\n\t\t\t\t\t"actions": [\n\t\t\t\t\t\t"find"\n\t\t\t\t\t]\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t\t}\n\t\t\t],\n\t\t\t"users": [\n\t\t\t\t{\n\t\t\t\t"name": "user-five",\n\t\t\t\t"db": "testAdmin",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-one",\n\t\t\t\t\t"key": "userOnePassKey"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"name": "role-four",\n\t\t\t\t\t\t"db": 
"testAdmin1"\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\t"name": "role-five",\n\t\t\t\t\t\t"db": "testAdmin2"\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t"name": "user-six",\n\t\t\t\t"db": "testAdmin",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-one",\n\t\t\t\t\t"key": "userOnePassKey"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{\n\t\t\t\t\t"name": "role-five",\n\t\t\t\t\t"db": "testAdmin2"\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t\t}\n\t\t\t]\n\t}}' ++ mktemp + local LAST_OUT=/tmp/tmp.mFEl3xTPGw ++ mktemp + local LAST_ERR=/tmp/tmp.xCNSQUcneh + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch psmdb some-name --type=merge --patch $'{\n\t\t"spec": {\n\t\t\t"roles": [\n\t\t\t\t{\n\t\t\t\t"role": "role-four",\n\t\t\t\t"db": "testAdmin1",\n\t\t\t\t"privileges": [\n\t\t\t\t\t{\n\t\t\t\t\t"resource": {\n\t\t\t\t\t\t"db": "testAdmin1",\n\t\t\t\t\t\t"collection": ""\n\t\t\t\t\t},\n\t\t\t\t\t"actions": [\n\t\t\t\t\t\t"find",\n\t\t\t\t\t\t"listIndexes",\n\t\t\t\t\t\t"listCollections"\n\t\t\t\t\t]\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t"resource": {\n\t\t\t\t\t\t"db": "testAdmin1",\n\t\t\t\t\t\t"collection": "system.profile"\n\t\t\t\t\t},\n\t\t\t\t\t"actions": [\n\t\t\t\t\t\t"dbStats",\n\t\t\t\t\t\t"collStats",\n\t\t\t\t\t\t"indexStats"\n\t\t\t\t\t]\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t"resource": {\n\t\t\t\t\t\t"db": "testAdmin1",\n\t\t\t\t\t\t"collection": "system.version"\n\t\t\t\t\t},\n\t\t\t\t\t"actions": [\n\t\t\t\t\t\t"find"\n\t\t\t\t\t]\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t"role": "role-five",\n\t\t\t\t"db": "testAdmin2",\n\t\t\t\t"privileges": [\n\t\t\t\t\t{\n\t\t\t\t\t"resource": {\n\t\t\t\t\t\t"db": "testAdmin2",\n\t\t\t\t\t\t"collection": ""\n\t\t\t\t\t},\n\t\t\t\t\t"actions": [\n\t\t\t\t\t\t"find",\n\t\t\t\t\t\t"listIndexes",\n\t\t\t\t\t\t"listCollections"\n\t\t\t\t\t]\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t"resource": {\n\t\t\t\t\t\t"db": "testAdmin2",\n\t\t\t\t\t\t"collection": "system.profile"\n\t\t\t\t\t},\n\t\t\t\t\t"actions": [\n\t\t\t\t\t\t"dbStats",\n\t\t\t\t\t\t"collStats",\n\t\t\t\t\t\t"indexStats"\n\t\t\t\t\t]\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t"resource": {\n\t\t\t\t\t\t"db": "testAdmin2",\n\t\t\t\t\t\t"collection": "system.version"\n\t\t\t\t\t},\n\t\t\t\t\t"actions": [\n\t\t\t\t\t\t"find"\n\t\t\t\t\t]\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t\t}\n\t\t\t],\n\t\t\t"users": [\n\t\t\t\t{\n\t\t\t\t"name": "user-five",\n\t\t\t\t"db": "testAdmin",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-one",\n\t\t\t\t\t"key": "userOnePassKey"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"name": "role-four",\n\t\t\t\t\t\t"db": "testAdmin1"\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\t"name": "role-five",\n\t\t\t\t\t\t"db": "testAdmin2"\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t"name": "user-six",\n\t\t\t\t"db": "testAdmin",\n\t\t\t\t"passwordSecretRef": {\n\t\t\t\t\t"name": "user-one",\n\t\t\t\t\t"key": "userOnePassKey"\n\t\t\t\t},\n\t\t\t\t"roles": [\n\t\t\t\t\t{\n\t\t\t\t\t"name": "role-five",\n\t\t\t\t\t"db": "testAdmin2"\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t\t\t}\n\t\t\t]\n\t}}' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.mFEl3xTPGw perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.xCNSQUcneh + rm /tmp/tmp.mFEl3xTPGw /tmp/tmp.xCNSQUcneh + return 0 + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local 
cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.poeTvNA6ps +++ mktemp ++ local LAST_ERR=/tmp/tmp.LpTzS5XcCZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.poeTvNA6ps ++ cat /tmp/tmp.LpTzS5XcCZ ++ rm /tmp/tmp.poeTvNA6ps /tmp/tmp.LpTzS5XcCZ ++ return 0 + [[ '' == true ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9gDWWVYQrl +++ mktemp ++ local LAST_ERR=/tmp/tmp.ghQC286ENe ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.9gDWWVYQrl ++ cat /tmp/tmp.ghQC286ENe ++ rm /tmp/tmp.9gDWWVYQrl /tmp/tmp.ghQC286ENe ++ return 0 + [[ '' == true ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EPNGlKNWY2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.uTyc8FCcQk ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.EPNGlKNWY2 ++ cat /tmp/tmp.uTyc8FCcQk ++ rm /tmp/tmp.EPNGlKNWY2 /tmp/tmp.uTyc8FCcQk ++ return 0 + [[ '' == true ]] + sleep 10 + [[ true == true ]] + set +x Waiting for cluster readyness ++ get_role_cmd '"role-four"' ++ local 'role="role-four"' ++ cmd=$'(function() {\n\tvar role = db.getRole("role-four", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' ++ echo $'(function() {\n\tvar role = db.getRole("role-four", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + compare testAdmin1 $'(function() {\n\tvar role = db.getRole("role-four", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 role-four + local database=testAdmin1 + local $'command=(function() {\n\tvar role = 
db.getRole("role-four", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local target=role-four + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use testAdmin1\\n (function() {\n\tvar role = db.getRole("role-four", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 mongodb + local $'command=use testAdmin1\\n (function() {\n\tvar role = db.getRole("role-four", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GPw9RLarIe +++ mktemp ++ local LAST_ERR=/tmp/tmp.fEFkaqgsVd ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.GPw9RLarIe ++ cat /tmp/tmp.fEFkaqgsVd ++ rm /tmp/tmp.GPw9RLarIe /tmp/tmp.fEFkaqgsVd ++ return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use testAdmin1\\n (function() {\n\tvar role = db.getRole("role-four", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.lx06zcl4DD ++ mktemp + local LAST_ERR=/tmp/tmp.BeXNmxGHum + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use testAdmin1\\n (function() {\n\tvar role = db.getRole("role-four", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 
0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.lx06zcl4DD + cat /tmp/tmp.BeXNmxGHum + rm /tmp/tmp.lx06zcl4DD /tmp/tmp.BeXNmxGHum + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/role-four.json /tmp/tmp.oSwann5ucX/role-four ++ get_role_cmd '"role-five"' ++ local 'role="role-five"' ++ cmd=$'(function() {\n\tvar role = db.getRole("role-five", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' ++ echo $'(function() {\n\tvar role = db.getRole("role-five", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + compare testAdmin2 $'(function() {\n\tvar role = db.getRole("role-five", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 role-five + local database=testAdmin2 + local $'command=(function() {\n\tvar role = db.getRole("role-five", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local target=role-five + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use testAdmin2\\n (function() {\n\tvar role = db.getRole("role-five", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 mongodb + local $'command=use testAdmin2\\n (function() {\n\tvar role = db.getRole("role-five", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' 
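Note on the comparison pattern used throughout this test: each check builds a getRole()/getUser() IIFE that sorts role.roles (so the diff is order-independent), pipes it through mongo on the psmdb-client pod, strips volatile output (ObjectId values, the numbered -NNNNN.svc service suffix, the "userId" field, shell noise), and diffs the result against a golden file under e2e-tests/custom-users-roles-sharded/compare/. A minimal sketch of that flow, assuming it runs somewhere that can reach the mongos service directly rather than via kubectl exec; the helper name fetch_and_compare and the golden-file path are illustrative, not part of the test suite, and the roles-sorting wrapper is elided:

    # Hypothetical condensation of the compare/run_mongos flow traced above.
    fetch_and_compare() {
        local db="$1" role="$2" uri="$3" golden="$4"
        printf 'use %s\n db.getRole("%s", {showPrivileges: true, showAuthenticationRestrictions: true})\n' "$db" "$role" \
            | mongo "mongodb://${uri}/admin" \
            | grep -E -v 'I NETWORK|W NETWORK|F NETWORK|connecting to:|Implicit session:|versions do not match' \
            | sed -e 's/ObjectId("[0-9a-f]*")//' -e 's/-[0-9]*\.svc/-xxxesvc/' -e '/"userId"/d' \
            > "/tmp/${role}"
        diff "${golden}" "/tmp/${role}"    # non-zero exit fails the test
    }
    # e.g.: fetch_and_compare testAdmin2 role-five \
    #     'userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017' \
    #     compare/role-five.json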
+ suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MdXZHLWIum +++ mktemp ++ local LAST_ERR=/tmp/tmp.30boNFCBQ2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.MdXZHLWIum ++ cat /tmp/tmp.30boNFCBQ2 ++ rm /tmp/tmp.MdXZHLWIum /tmp/tmp.30boNFCBQ2 ++ return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use testAdmin2\\n (function() {\n\tvar role = db.getRole("role-five", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.kMTwnxKRBc ++ mktemp + local LAST_ERR=/tmp/tmp.kmEMfbCr4v + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use testAdmin2\\n (function() {\n\tvar role = db.getRole("role-five", {showPrivileges: true, showAuthenticationRestrictions: true});\n\tvar roles = role.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\trole.roles = roles;\n\tprintjson(role);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.kMTwnxKRBc + cat /tmp/tmp.kmEMfbCr4v + rm /tmp/tmp.kMTwnxKRBc /tmp/tmp.kmEMfbCr4v + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/role-five.json /tmp/tmp.oSwann5ucX/role-five ++ get_user_cmd '"user-five"' ++ local 'user="user-five"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-five");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-five");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare testAdmin $'(function() {\n\tvar user = db.getUser("user-five");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 user-five + local database=testAdmin + local $'command=(function() {\n\tvar user = db.getUser("user-five");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + 
local target=user-five + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use testAdmin\\n (function() {\n\tvar user = db.getUser("user-five");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 mongodb + local $'command=use testAdmin\\n (function() {\n\tvar user = db.getUser("user-five");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local driver=mongodb + local suffix=.svc.cluster.local + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local mongo_flag= + local port=27017 + local mongo_bin=mongo + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qc0lI0NfCP +++ mktemp ++ local LAST_ERR=/tmp/tmp.Lj2qSLLCYY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.qc0lI0NfCP ++ cat /tmp/tmp.Lj2qSLLCYY ++ rm /tmp/tmp.qc0lI0NfCP /tmp/tmp.Lj2qSLLCYY ++ return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use testAdmin\\n (function() {\n\tvar user = db.getUser("user-five");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.da922TgjmX ++ mktemp + local LAST_ERR=/tmp/tmp.ooZ50OWxFz + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use testAdmin\\n (function() {\n\tvar user = db.getUser("user-five");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.da922TgjmX + cat /tmp/tmp.ooZ50OWxFz + rm /tmp/tmp.da922TgjmX /tmp/tmp.ooZ50OWxFz + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/user-five.json /tmp/tmp.oSwann5ucX/user-five ++ get_user_cmd '"user-six"' ++ local 'user="user-six"' ++ cmd=$'(function() {\n\tvar user = db.getUser("user-six");\n\tvar roles = 
user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' ++ echo $'(function() {\n\tvar user = db.getUser("user-six");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + compare testAdmin $'(function() {\n\tvar user = db.getUser("user-six");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 user-six + local database=testAdmin + local $'command=(function() {\n\tvar user = db.getUser("user-six");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local target=user-six + [[ docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0 =~ 5\.0 ]] + run_mongos $'use testAdmin\\n (function() {\n\tvar user = db.getUser("user-six");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 mongodb + local $'command=use testAdmin\\n (function() {\n\tvar user = db.getUser("user-six");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local uri=userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxxesvc/' + sed '/"userId"/d' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zfRnjjm0ha +++ mktemp ++ local LAST_ERR=/tmp/tmp.3kRnLJNCeu ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.zfRnjjm0ha ++ cat /tmp/tmp.3kRnLJNCeu ++ rm /tmp/tmp.zfRnjjm0ha /tmp/tmp.3kRnLJNCeu ++ return 0 + local client_container=psmdb-client-bb8b97679-xkjrq + kubectl_bin exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use testAdmin\\n (function() {\n\tvar user = db.getUser("user-six");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo 
mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.4spuM8DFUN ++ mktemp + local LAST_ERR=/tmp/tmp.Ia54Y37DJO + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-xkjrq -- bash -c $'printf \'use testAdmin\\n (function() {\n\tvar user = db.getUser("user-six");\n\tvar roles = user.roles;\n\troles.sort((a, b) => {\n\t\tif (a.role < b.role) return -1;\n\t\tif (a.role > b.role) return 1;\n\t\treturn 0;\n\t});\n\tuser.roles = roles;\n\tprintjson(user);\n\t})();\\n\' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.custom-users-roles-sharded-27922.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.4spuM8DFUN + cat /tmp/tmp.Ia54Y37DJO + rm /tmp/tmp.4spuM8DFUN /tmp/tmp.Ia54Y37DJO + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/e2e-tests/custom-users-roles-sharded/compare/user-six.json /tmp/tmp.oSwann5ucX/user-six + destroy custom-users-roles-sharded-27922 + local namespace=custom-users-roles-sharded-27922 + local ignore_logs=true + [[ 0 == 1 ]] + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false ']' + delete_backups + desc 'Delete psmdb-backup' + set +o xtrace ----------------------------------------------------------------------------------- Delete psmdb-backup ----------------------------------------------------------------------------------- ++ kubectl_bin get psmdb-backup --no-headers ++ wc -l +++ mktemp ++ local LAST_OUT=/tmp/tmp.c5ZULho5kn +++ mktemp ++ local LAST_ERR=/tmp/tmp.xPsU9QWVXI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup --no-headers ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.c5ZULho5kn ++ cat /tmp/tmp.xPsU9QWVXI No resources found in custom-users-roles-sharded-27922 namespace. 
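The teardown that starts here mirrors the setup-time delete_crd routine: psmdb-backup objects are checked and removed first, then each CRD from deploy/crd.yaml is deleted with --wait=false, finalizers are stripped from any leftover custom resources so deletion cannot hang on them, kubectl wait confirms each CRD is gone, and finally the cluster-wide RBAC (cw-rbac.yaml) and the cert-manager manifest are deleted. A hedged sketch of the finalizer-stripping step for a single CRD, condensing what the trace below shows; the "doesn't have a resource type" errors are expected once the CRD is already gone, which is why the script swallows them (the bare ':' in the trace):

    # Sketch: clear finalizers on leftover CRs, then wait for the CRD itself to disappear.
    crd=perconaservermongodbs.psmdb.percona.com
    kubectl get "$crd" --all-namespaces -o wide 2>/dev/null | grep -v NAMESPACE \
        | xargs -L 1 sh -xc "kubectl patch $crd -n \$0 \$1 --type=merge -p '{\"metadata\":{\"finalizers\":[]}}'" \
        || :    # tolerate "doesn't have a resource type" once the CRD is gone
    kubectl wait --for=delete crd "$crd"

Deleting with --wait=false and then waiting explicitly is what creates the window in which finalizers can be cleared; without that, a custom resource carrying a finalizer would block the CRD deletion indefinitely.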
++ rm /tmp/tmp.c5ZULho5kn /tmp/tmp.xPsU9QWVXI ++ return 0 + '[' 0 '!=' 0 ']' + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.ap5jHStNq1 ++ mktemp + local LAST_ERR=/tmp/tmp.Zrnb2zdATJ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ap5jHStNq1 customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.Zrnb2zdATJ + rm /tmp/tmp.ap5jHStNq1 /tmp/tmp.Zrnb2zdATJ + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/deploy/crd.yaml ++ grep -v '\-\-\-' grep: warning: stray \ before - grep: warning: stray \ before - + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.PVLUVg7mG4 ++ mktemp + local LAST_ERR=/tmp/tmp.v0e8TGXyWM + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.PVLUVg7mG4 + cat /tmp/tmp.v0e8TGXyWM + rm /tmp/tmp.PVLUVg7mG4 /tmp/tmp.v0e8TGXyWM + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.ywvMU4P9h7 ++ mktemp + local LAST_ERR=/tmp/tmp.OIWg22y92x + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + 
break + cat /tmp/tmp.ywvMU4P9h7 + cat /tmp/tmp.OIWg22y92x + rm /tmp/tmp.ywvMU4P9h7 /tmp/tmp.OIWg22y92x + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.6V02ewDSFP ++ mktemp + local LAST_ERR=/tmp/tmp.28fMagrDWc + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6V02ewDSFP + cat /tmp/tmp.28fMagrDWc + rm /tmp/tmp.6V02ewDSFP /tmp/tmp.28fMagrDWc + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.RdojwOrquQ ++ mktemp + local LAST_ERR=/tmp/tmp.MK7C5X54Uz + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2287/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.RdojwOrquQ clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.MK7C5X54Uz + rm /tmp/tmp.RdojwOrquQ /tmp/tmp.MK7C5X54Uz + return 0 + destroy_cert_manager + kubectl_bin delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.NHfB8XxXrv ++ mktemp + local LAST_ERR=/tmp/tmp.3CpZsar8Dt + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.NHfB8XxXrv + cat /tmp/tmp.3CpZsar8Dt Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.NHfB8XxXrv + cat /tmp/tmp.3CpZsar8Dt Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from 
server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error 
when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 4 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.NHfB8XxXrv + cat 
/tmp/tmp.3CpZsar8Dt Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when 
deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io 
"cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 8 + cat /tmp/tmp.NHfB8XxXrv + cat /tmp/tmp.3CpZsar8Dt Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io 
"cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps 
"cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + rm /tmp/tmp.NHfB8XxXrv /tmp/tmp.3CpZsar8Dt + return 1 + true + '[' -n '' ']' + '[' -n psmdb-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace custom-users-roles-sharded-27922 + rm -rf /tmp/tmp.oSwann5ucX + kubectl_bin delete --grace-period=0 --force=true namespace psmdb-operator ++ mktemp ++ mktemp + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.GlxsT2lzQO + local LAST_OUT=/tmp/tmp.BpxOsQtjtR ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.NqXuKcNEPY + local exit_status=0 + local timeout=4 + local LAST_ERR=/tmp/tmp.NpGSrwoeez + local exit_status=0 + local timeout=4 ++ seq 0 2 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete --grace-period=0 --force=true namespace psmdb-operator + for i in $(seq 0 2) + set +e + kubectl delete --grace-period=0 --force=true namespace custom-users-roles-sharded-27922