Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/e2e-tests/logs/users.log grep: warning: stray \ before - Warning: version difference between client (1.34) and server (1.30) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.30) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.30) exceeds the supported minor version skew of +/-1 + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + create_infra users-26599 + local ns=users-26599 + [[ 1 == 1 ]] + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.2U3GUKjapn ++ mktemp + local LAST_ERR=/tmp/tmp.988IIJ7PNF + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.2U3GUKjapn customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.988IIJ7PNF + rm /tmp/tmp.2U3GUKjapn /tmp/tmp.988IIJ7PNF + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/deploy/crd.yaml ++ grep -v '\-\-\-' grep: warning: stray \ before - grep: warning: stray \ before - + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n users-29192 backup-minio --type=merge -p '{"metadata":{"finalizers":[]}}' perconaservermongodbbackup.psmdb.percona.com/backup-minio patched + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.bats27kM5t ++ mktemp + local LAST_ERR=/tmp/tmp.pnS37vGXEC + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.bats27kM5t + cat /tmp/tmp.pnS37vGXEC + rm /tmp/tmp.bats27kM5t /tmp/tmp.pnS37vGXEC + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p 
'{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.WL9ZbDymur ++ mktemp + local LAST_ERR=/tmp/tmp.f0ekTjcbAJ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.WL9ZbDymur + cat /tmp/tmp.f0ekTjcbAJ + rm /tmp/tmp.WL9ZbDymur /tmp/tmp.f0ekTjcbAJ + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.TXmNvxCuDX ++ mktemp + local LAST_ERR=/tmp/tmp.GaiiMs3G0Z + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.TXmNvxCuDX + cat /tmp/tmp.GaiiMs3G0Z + rm /tmp/tmp.TXmNvxCuDX /tmp/tmp.GaiiMs3G0Z + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.C4XIoDZ8rG ++ mktemp + local LAST_ERR=/tmp/tmp.2hHkabPcje + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.C4XIoDZ8rG clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.2hHkabPcje + rm /tmp/tmp.C4XIoDZ8rG /tmp/tmp.2hHkabPcje + return 0 + check_crd_for_deletion PR-1993-afe14c70 + local git_tag=PR-1993-afe14c70 ++ curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/PR-1993-afe14c70/deploy/crd.yaml ++ yq eval .metadata.name ++ /usr/sbin/sed s/---//g ++ /usr/sbin/sed ':a;N;$!ba;s/\n/ /g' + for crd_name in $(curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml | yq eval '.metadata.name' | $sed 's/---//g' | $sed ':a;N;$!ba;s/\n/ /g') ++ kubectl_bin get crd/null -o 'jsonpath={.status.conditions[-1].type}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nZLmlRPLvG +++ mktemp ++ local LAST_ERR=/tmp/tmp.CounwSzJ0q ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.nZLmlRPLvG ++ cat /tmp/tmp.CounwSzJ0q Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ 
sleep 0 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.nZLmlRPLvG ++ cat /tmp/tmp.CounwSzJ0q Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 4 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.nZLmlRPLvG ++ cat /tmp/tmp.CounwSzJ0q Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 8 ++ cat /tmp/tmp.nZLmlRPLvG ++ cat /tmp/tmp.CounwSzJ0q Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ rm /tmp/tmp.nZLmlRPLvG /tmp/tmp.CounwSzJ0q ++ return 1 + [[ '' == \T\e\r\m\i\n\a\t\i\n\g ]] + '[' -n psmdb-operator ']' + create_namespace psmdb-operator + local namespace=psmdb-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces psmdb-operator' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace psmdb-operator --ignore-not-found ++ mktemp ++ mktemp egrep: warning: egrep is obsolescent; using grep -E + local LAST_OUT=/tmp/tmp.2V0UClfg8D ++ mktemp + local LAST_OUT=/tmp/tmp.FKI7OfhhZ4 ++ 
mktemp + local LAST_ERR=/tmp/tmp.GAZlPd30Z2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.rCdQfIbcR0 + local exit_status=0 + local timeout=4 + for i in $(seq 0 2) + set +e + kubectl get ns ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete namespace psmdb-operator --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.2V0UClfg8D + cat /tmp/tmp.GAZlPd30Z2 + rm /tmp/tmp.2V0UClfg8D /tmp/tmp.GAZlPd30Z2 + return 0 namespace "users-29192" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.FKI7OfhhZ4 namespace "psmdb-operator" deleted + cat /tmp/tmp.rCdQfIbcR0 + rm /tmp/tmp.FKI7OfhhZ4 /tmp/tmp.rCdQfIbcR0 + return 0 + kubectl_bin wait --for=delete namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.m8aZYKBqQn ++ mktemp + local LAST_ERR=/tmp/tmp.aVffQSMLQ7 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.m8aZYKBqQn + cat /tmp/tmp.aVffQSMLQ7 + rm /tmp/tmp.m8aZYKBqQn /tmp/tmp.aVffQSMLQ7 + return 0 + desc 'create namespace psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.OkkTeX7eUl ++ mktemp + local LAST_ERR=/tmp/tmp.7RwifyAnY2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.OkkTeX7eUl namespace/psmdb-operator created + cat /tmp/tmp.7RwifyAnY2 + rm /tmp/tmp.OkkTeX7eUl /tmp/tmp.7RwifyAnY2 + return 0 + set_kube_ctx psmdb-operator + local namespace=psmdb-operator ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.7Ahs0ZqJXD +++ mktemp ++ local LAST_ERR=/tmp/tmp.KaYcxiIaXC ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.7Ahs0ZqJXD ++ cat /tmp/tmp.KaYcxiIaXC ++ rm /tmp/tmp.7Ahs0ZqJXD /tmp/tmp.KaYcxiIaXC ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1993-afe14c70-12-cluster5 --namespace=psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.1Z03NiVQwb ++ mktemp + local LAST_ERR=/tmp/tmp.qFfKVANLlN + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1993-afe14c70-12-cluster5 --namespace=psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.1Z03NiVQwb Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1993-afe14c70-12-cluster5" modified. 
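
The delete_crd step at the top of this log unblocks CRD deletion by stripping finalizers from any leftover custom resources before waiting for the CRDs to disappear. A minimal standalone sketch of that technique, reconstructed from the commands visible in the trace (CRD names are the real ones; the loop structure and flags here are simplified assumptions, not the actual e2e helper):

# For every PSMDB CRD, clear finalizers on leftover CRs so deletion is not blocked,
# then delete the CRD and wait for it to be gone.
for crd in perconaservermongodbbackups.psmdb.percona.com \
           perconaservermongodbrestores.psmdb.percona.com \
           perconaservermongodbs.psmdb.percona.com; do
    kubectl get "$crd" --all-namespaces --no-headers 2>/dev/null \
        | awk '{print $1, $2}' \
        | while read -r ns name; do
            kubectl patch "$crd" -n "$ns" "$name" --type=merge \
                -p '{"metadata":{"finalizers":[]}}'
        done
    kubectl delete crd "$crd" --ignore-not-found --wait=false
    kubectl wait --for=delete crd "$crd" || true
done

Reading the list with --no-headers and a read loop also sidesteps the empty-list case that produces the stray "kubectl patch ... -n sh" error visible above when no restore objects exist and xargs falls back to its own arguments.
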
+ cat /tmp/tmp.qFfKVANLlN + rm /tmp/tmp.1Z03NiVQwb /tmp/tmp.qFfKVANLlN + return 0 + deploy_operator + desc 'start PSMDB operator' + set +o xtrace ----------------------------------------------------------------------------------- start PSMDB operator ----------------------------------------------------------------------------------- + local cr_file + '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/e2e-tests/users/conf/crd.yaml ']' + cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/deploy/crd.yaml + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.cpeqoVqTWt ++ mktemp + local LAST_ERR=/tmp/tmp.Nw4f2D2FsP + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.cpeqoVqTWt customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied + cat /tmp/tmp.Nw4f2D2FsP + rm /tmp/tmp.cpeqoVqTWt /tmp/tmp.Nw4f2D2FsP + return 0 + '[' -n psmdb-operator ']' + apply_rbac cw-rbac + local operator_namespace=psmdb-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: psmdb-operator^' + kubectl_bin apply -n psmdb-operator -f - ++ mktemp + local LAST_OUT=/tmp/tmp.gMpwn3sYT2 ++ mktemp + local LAST_ERR=/tmp/tmp.pPDaRmBbLx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.gMpwn3sYT2 clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created + cat /tmp/tmp.pPDaRmBbLx + rm /tmp/tmp.gMpwn3sYT2 /tmp/tmp.pPDaRmBbLx + return 0 + yq eval ' (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-1993-afe14c70") | ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") | ((.. 
| select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/deploy/cw-operator.yaml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.2EMgIyyBcz ++ mktemp + local LAST_ERR=/tmp/tmp.NPTxROl1Nb + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.2EMgIyyBcz deployment.apps/percona-server-mongodb-operator created + cat /tmp/tmp.NPTxROl1Nb + rm /tmp/tmp.2EMgIyyBcz /tmp/tmp.NPTxROl1Nb + return 0 + sleep 2 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.eNqnIC38YC +++ mktemp ++ local LAST_ERR=/tmp/tmp.dHJ4BMvmhi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.eNqnIC38YC ++ cat /tmp/tmp.dHJ4BMvmhi ++ rm /tmp/tmp.eNqnIC38YC /tmp/tmp.dHJ4BMvmhi ++ return 0 + wait_pod percona-server-mongodb-operator-b7c64c587-v7qws + local pod=percona-server-mongodb-operator-b7c64c587-v7qws + set +o xtrace waiting for pod/percona-server-mongodb-operator-b7c64c587-v7qws to be ready.OK + echo 'Print operator info from log' Print operator info from log + grep 'Manager starting up' ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.RDkpfOPyX8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.6WNgUiXX09 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.RDkpfOPyX8 ++ cat /tmp/tmp.6WNgUiXX09 ++ rm /tmp/tmp.RDkpfOPyX8 /tmp/tmp.6WNgUiXX09 ++ return 0 + kubectl_bin logs percona-server-mongodb-operator-b7c64c587-v7qws ++ mktemp + local LAST_OUT=/tmp/tmp.vEQYeEe454 ++ mktemp + local LAST_ERR=/tmp/tmp.DpfjcIfByS + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl logs percona-server-mongodb-operator-b7c64c587-v7qws + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.vEQYeEe454 + cat /tmp/tmp.DpfjcIfByS + rm /tmp/tmp.vEQYeEe454 /tmp/tmp.DpfjcIfByS + return 0 2025-10-06T22:30:58.423Z INFO setup Manager starting up {"gitCommit": "afe14c7037e555af435bdbf8ce6cdb2ddbfc4d36", "gitBranch": "PR-1993-afe14c70", "buildTime": "", "goVersion": "go1.25.1", "os": "linux", "arch": "amd64"} + create_namespace users-26599 + local namespace=users-26599 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but 
no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces users-26599' ++ mktemp + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-26599 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-26599 --ignore-not-found ++ mktemp + xargs kubectl delete ns + local LAST_OUT=/tmp/tmp.NiVCU3E48M egrep: warning: egrep is obsolescent; using grep -E + local LAST_OUT=/tmp/tmp.0ANuEGpYnz ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.wWPWQw0VTu + local exit_status=0 + local timeout=4 + local LAST_ERR=/tmp/tmp.DJEWIltq85 + local exit_status=0 + local timeout=4 ++ seq 0 2 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete namespace users-26599 --ignore-not-found + for i in $(seq 0 2) + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.NiVCU3E48M + cat /tmp/tmp.DJEWIltq85 + rm /tmp/tmp.NiVCU3E48M /tmp/tmp.DJEWIltq85 + return 0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.0ANuEGpYnz + cat /tmp/tmp.wWPWQw0VTu + rm /tmp/tmp.0ANuEGpYnz /tmp/tmp.wWPWQw0VTu error: resource(s) were provided, but no name was specified + return 0 + kubectl_bin wait --for=delete namespace users-26599 ++ mktemp + local LAST_OUT=/tmp/tmp.6HGl0exLKP ++ mktemp + local LAST_ERR=/tmp/tmp.USK2X0biLO + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete namespace users-26599 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6HGl0exLKP + cat /tmp/tmp.USK2X0biLO + rm /tmp/tmp.6HGl0exLKP /tmp/tmp.USK2X0biLO + return 0 + desc 'create namespace users-26599' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-26599 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-26599 ++ mktemp + local LAST_OUT=/tmp/tmp.h4elDcxAZv ++ mktemp + local LAST_ERR=/tmp/tmp.ohLwjeTSxE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) 
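
The deploy_operator step above pins every image by piping the deployment manifest through yq before kubectl apply -f -. A sketch of that substitution using the same yq v4 expressions that appear in the trace (the image tag is the one built for this PR run; the manifest path is shown relative to the repository checkout):

# Rewrite the operator Deployment on the fly: set the container image,
# force DISABLE_TELEMETRY=true and LOG_LEVEL=DEBUG, then apply the result.
yq eval '
  (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-1993-afe14c70") |
  ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") |
  ((.. | select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' \
  deploy/cw-operator.yaml \
  | kubectl apply -f -
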
+ set +e + kubectl create namespace users-26599 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.h4elDcxAZv namespace/users-26599 created + cat /tmp/tmp.ohLwjeTSxE + rm /tmp/tmp.h4elDcxAZv /tmp/tmp.ohLwjeTSxE + return 0 + set_kube_ctx users-26599 + local namespace=users-26599 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.i5TuFVLlKy +++ mktemp ++ local LAST_ERR=/tmp/tmp.pF4wYLmLqp ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.i5TuFVLlKy ++ cat /tmp/tmp.pF4wYLmLqp ++ rm /tmp/tmp.i5TuFVLlKy /tmp/tmp.pF4wYLmLqp ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1993-afe14c70-12-cluster5 --namespace=users-26599 ++ mktemp + local LAST_OUT=/tmp/tmp.J5zXq5B7l2 ++ mktemp + local LAST_ERR=/tmp/tmp.AbV1dGUVXH + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1993-afe14c70-12-cluster5 --namespace=users-26599 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.J5zXq5B7l2 Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1993-afe14c70-12-cluster5" modified. + cat /tmp/tmp.AbV1dGUVXH + rm /tmp/tmp.J5zXq5B7l2 /tmp/tmp.AbV1dGUVXH + return 0 + deploy_minio + desc 'install Minio' + set +o xtrace ----------------------------------------------------------------------------------- install Minio ----------------------------------------------------------------------------------- + helm uninstall minio-service Error: uninstall: Release not loaded: minio-service: release: not found + : + helm repo remove minio "minio" has been removed from your repositories + helm repo add minio https://charts.min.io/ "minio" has been added to your repositories + retry 10 60 helm install minio-service --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio + local max=10 + local delay=60 + shift 2 + local n=1 + helm install minio-service --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio NAME: minio-service LAST DEPLOYED: Mon Oct 6 22:31:18 2025 NAMESPACE: users-26599 STATUS: deployed REVISION: 1 TEST SUITE: None NOTES: MinIO can be accessed via port 9000 on the following DNS name from within your cluster: minio-service.users-26599.cluster.local To access MinIO from localhost, run the below commands: 1. export POD_NAME=$(kubectl get pods --namespace users-26599 -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}") 2. kubectl port-forward $POD_NAME 9000 --namespace users-26599 Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/ You can now access MinIO server on http://localhost:9000. 
Follow the below steps to connect to MinIO server with mc client: 1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart 2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace users-26599 minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace users-26599 minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000 3. mc ls minio-service-local ++ kubectl_bin get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uQJeexmJ5D +++ mktemp ++ local LAST_ERR=/tmp/tmp.230jX8Gw49 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.uQJeexmJ5D ++ cat /tmp/tmp.230jX8Gw49 ++ rm /tmp/tmp.uQJeexmJ5D /tmp/tmp.230jX8Gw49 ++ return 0 + MINIO_POD=minio-service-86dfccd949-2q9xl + wait_pod minio-service-86dfccd949-2q9xl + local pod=minio-service-86dfccd949-2q9xl + set +o xtrace waiting for pod/minio-service-86dfccd949-2q9xl to be ready.OK + '[' -n psmdb-operator ']' + kubectl_bin create svc -n psmdb-operator externalname minio-service --external-name=minio-service.users-26599.svc.cluster.local --tcp=9000 ++ mktemp + local LAST_OUT=/tmp/tmp.BpBPOXyaTF ++ mktemp + local LAST_ERR=/tmp/tmp.hEpQF8hqt1 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create svc -n psmdb-operator externalname minio-service --external-name=minio-service.users-26599.svc.cluster.local --tcp=9000 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.BpBPOXyaTF service/minio-service created + cat /tmp/tmp.hEpQF8hqt1 + rm /tmp/tmp.BpBPOXyaTF /tmp/tmp.hEpQF8hqt1 + return 0 + create_minio_bucket operator-testing + local bucket=operator-testing + kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing' ++ mktemp + local LAST_OUT=/tmp/tmp.7T9CUVMTDJ ++ mktemp + local LAST_ERR=/tmp/tmp.HC9NhzTnom + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.7T9CUVMTDJ make_bucket: operator-testing pod "aws-cli" deleted from users-26599 namespace + cat /tmp/tmp.HC9NhzTnom All commands and output from this session will be recorded in container logs, including credentials and sensitive information passed through the command prompt. If you don't see a command prompt, try pressing enter. 
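
The backup bucket is created from inside the cluster with a throwaway aws-cli pod pointed at the in-cluster MinIO endpoint; the sketch below mirrors the kubectl run command in the trace (access keys are the test-only values set during the helm install, not real credentials):

# Create the operator-testing bucket on the in-cluster MinIO using a one-shot pod.
kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- \
    bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing'

As the warning printed above notes, everything passed on that command line (including the keys) ends up in the container logs, which is acceptable here only because they are disposable test credentials.
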
+ rm /tmp/tmp.7T9CUVMTDJ /tmp/tmp.HC9NhzTnom + return 0 + desc 'create secrets and start client' + set +o xtrace ----------------------------------------------------------------------------------- create secrets and start client ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/e2e-tests/conf/client.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/e2e-tests/users/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/e2e-tests/conf/minio-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.kWo6rUmDCa ++ mktemp + local LAST_ERR=/tmp/tmp.HNGtgrRCC0 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/e2e-tests/conf/client.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/e2e-tests/users/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/e2e-tests/conf/minio-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.kWo6rUmDCa deployment.apps/psmdb-client created secret/some-users created secret/minio-secret created + cat /tmp/tmp.HNGtgrRCC0 + rm /tmp/tmp.kWo6rUmDCa /tmp/tmp.HNGtgrRCC0 + return 0 + cluster=some-name-rs0 + desc 'create first PSMDB cluster some-name-rs0' + set +o xtrace ----------------------------------------------------------------------------------- create first PSMDB cluster some-name-rs0 ----------------------------------------------------------------------------------- + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/e2e-tests/users/conf/some-name-rs0.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/e2e-tests/users/conf/some-name-rs0.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/e2e-tests/users/conf/some-name-rs0.yml + yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod7.0"' ++ mktemp + yq eval '(.spec | select(has("pmm"))).pmm.image = "percona/pmm-client:2.44.1-1"' + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-1993-afe14c70"' + yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' + yq eval '.spec.upgradeOptions.apply="Never"' + local LAST_OUT=/tmp/tmp.TGrOxWmNOI ++ mktemp + local LAST_ERR=/tmp/tmp.0eoRrWcL9Q + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.TGrOxWmNOI perconaservermongodb.psmdb.percona.com/some-name created + cat /tmp/tmp.0eoRrWcL9Q + rm /tmp/tmp.TGrOxWmNOI /tmp/tmp.0eoRrWcL9Q + return 0 + desc 'Check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- Check if all 3 Pods started ----------------------------------------------------------------------------------- + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready........OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod 
some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.....OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XOTAhWpME4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.a6zIs3Bdyy ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.XOTAhWpME4 ++ cat /tmp/tmp.a6zIs3Bdyy ++ rm /tmp/tmp.XOTAhWpME4 /tmp/tmp.a6zIs3Bdyy ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.....OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.g2kJO1uAkp +++ mktemp ++ local LAST_ERR=/tmp/tmp.Wtd0zhjAw3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.g2kJO1uAkp ++ cat /tmp/tmp.Wtd0zhjAw3 ++ rm /tmp/tmp.g2kJO1uAkp /tmp/tmp.Wtd0zhjAw3 ++ return 0 + [[ '' == \t\r\u\e ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3I2TQkmA5a +++ mktemp ++ local LAST_ERR=/tmp/tmp.cWsGTPyOxk ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3I2TQkmA5a ++ cat /tmp/tmp.cWsGTPyOxk ++ rm /tmp/tmp.3I2TQkmA5a /tmp/tmp.cWsGTPyOxk ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness.......... 
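
After applying the cr, the test blocks first on per-pod readiness and then on the custom resource reporting a ready state. A minimal sketch of that readiness poll, consistent with the .status.state jsonpath query used throughout this test; the helper name, retry count, and sleep interval below are illustrative assumptions, not the actual wait_for_running / wait_cluster_consistency implementations:

# Poll .status.state of the PerconaServerMongoDB resource until it reports "ready".
wait_cluster_ready() {
    local cluster=$1 retries=${2:-32}
    for _ in $(seq 1 "$retries"); do
        state=$(kubectl get psmdb "$cluster" -o 'jsonpath={.status.state}')
        [ "$state" = "ready" ] && return 0
        sleep 10
    done
    echo "cluster $cluster did not become ready" >&2
    return 1
}

wait_cluster_ready some-name
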
+ backup_name=backup-minio + desc 'change MONGODB_DATABASE_ADMIN_PASSWORD' + set +o xtrace ----------------------------------------------------------------------------------- change MONGODB_DATABASE_ADMIN_PASSWORD ----------------------------------------------------------------------------------- + patch_secret some-users MONGODB_DATABASE_ADMIN_PASSWORD dGVzdC1wYXNzd29yZA== + local secret=some-users + local key=MONGODB_DATABASE_ADMIN_PASSWORD + local value=dGVzdC1wYXNzd29yZA== + kubectl patch secret some-users '-p={"data":{"MONGODB_DATABASE_ADMIN_PASSWORD": "dGVzdC1wYXNzd29yZA=="}}' secret/some-users patched + sleep 25 + psmdb=some-name + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Qdvu5r7BDz +++ mktemp ++ local LAST_ERR=/tmp/tmp.4kby7A6nLM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Qdvu5r7BDz ++ cat /tmp/tmp.4kby7A6nLM ++ rm /tmp/tmp.Qdvu5r7BDz /tmp/tmp.4kby7A6nLM ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + sleep 15 ++ getUserData some-users MONGODB_DATABASE_ADMIN_USER ++ local secretName=some-users ++ local dataKey=MONGODB_DATABASE_ADMIN_USER +++ getSecretData some-users MONGODB_DATABASE_ADMIN_USER +++ local secretName=some-users +++ local dataKey=MONGODB_DATABASE_ADMIN_USER ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_DATABASE_ADMIN_USER}}' ++++ base64 -d +++ local data=databaseAdmin +++ echo databaseAdmin ++ urlencode databaseAdmin ++ uri=databaseAdmin ++ echo -n databaseAdmin ++ jq -s -R -r @uri + user=databaseAdmin + check_mongo_auth databaseAdmin:test-password@some-name-rs0-0.some-name-rs0.users-26599 + local uri=databaseAdmin:test-password@some-name-rs0-0.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' databaseAdmin:test-password@some-name-rs0-0.some-name-rs0.users-26599 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local uri=databaseAdmin:test-password@some-name-rs0-0.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.x6b6UA2gUi ++++ mktemp +++ local LAST_ERR=/tmp/tmp.R44LwHYBVJ +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.x6b6UA2gUi +++ cat /tmp/tmp.R44LwHYBVJ +++ rm /tmp/tmp.x6b6UA2gUi /tmp/tmp.R44LwHYBVJ +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ databaseAdmin:test-password@some-name-rs0-0.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo 
mongodb://databaseAdmin:test-password@some-name-rs0-0.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6X4gI5DITa +++ mktemp ++ local LAST_ERR=/tmp/tmp.Em4uU8rJVG ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://databaseAdmin:test-password@some-name-rs0-0.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.6X4gI5DITa ++ cat /tmp/tmp.Em4uU8rJVG ++ rm /tmp/tmp.6X4gI5DITa /tmp/tmp.Em4uU8rJVG ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth databaseAdmin:test-password@some-name-rs0-1.some-name-rs0.users-26599 + local uri=databaseAdmin:test-password@some-name-rs0-1.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' databaseAdmin:test-password@some-name-rs0-1.some-name-rs0.users-26599 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=databaseAdmin:test-password@some-name-rs0-1.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.HEOnLCkgmT ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_ERR=/tmp/tmp.uE5RCJnINL +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.HEOnLCkgmT +++ cat /tmp/tmp.uE5RCJnINL +++ rm /tmp/tmp.HEOnLCkgmT /tmp/tmp.uE5RCJnINL +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ databaseAdmin:test-password@some-name-rs0-1.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://databaseAdmin:test-password@some-name-rs0-1.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.htFEXUeQ1l +++ mktemp ++ local LAST_ERR=/tmp/tmp.lHXzQn5sBL ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://databaseAdmin:test-password@some-name-rs0-1.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.htFEXUeQ1l ++ cat /tmp/tmp.lHXzQn5sBL ++ rm /tmp/tmp.htFEXUeQ1l /tmp/tmp.lHXzQn5sBL ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return 
----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth databaseAdmin:test-password@some-name-rs0-2.some-name-rs0.users-26599 + local uri=databaseAdmin:test-password@some-name-rs0-2.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' databaseAdmin:test-password@some-name-rs0-2.some-name-rs0.users-26599 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=databaseAdmin:test-password@some-name-rs0-2.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.pynlkXwZcF ++++ mktemp +++ local LAST_ERR=/tmp/tmp.d3IgQHm1pe +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.pynlkXwZcF +++ cat /tmp/tmp.d3IgQHm1pe +++ rm /tmp/tmp.pynlkXwZcF /tmp/tmp.d3IgQHm1pe +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ databaseAdmin:test-password@some-name-rs0-2.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://databaseAdmin:test-password@some-name-rs0-2.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HUwEsaW9jP +++ mktemp ++ local LAST_ERR=/tmp/tmp.FVjGg4tuEU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://databaseAdmin:test-password@some-name-rs0-2.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.HUwEsaW9jP ++ cat /tmp/tmp.FVjGg4tuEU ++ rm /tmp/tmp.HUwEsaW9jP /tmp/tmp.FVjGg4tuEU ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + desc 'change MONGODB_BACKUP_PASSWORD' + set +o xtrace ----------------------------------------------------------------------------------- change MONGODB_BACKUP_PASSWORD ----------------------------------------------------------------------------------- + patch_secret some-users MONGODB_BACKUP_PASSWORD dGVzdC1wYXNzd29yZA== + local secret=some-users + local key=MONGODB_BACKUP_PASSWORD + local value=dGVzdC1wYXNzd29yZA== + kubectl patch secret some-users '-p={"data":{"MONGODB_BACKUP_PASSWORD": "dGVzdC1wYXNzd29yZA=="}}' secret/some-users patched + sleep 25 + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local 
LAST_OUT=/tmp/tmp.bcGGl4vCy6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.j9YZ0UZpUY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bcGGl4vCy6 ++ cat /tmp/tmp.j9YZ0UZpUY ++ rm /tmp/tmp.bcGGl4vCy6 /tmp/tmp.j9YZ0UZpUY ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + sleep 15 ++ getUserData some-users MONGODB_BACKUP_USER ++ local secretName=some-users ++ local dataKey=MONGODB_BACKUP_USER +++ getSecretData some-users MONGODB_BACKUP_USER +++ local secretName=some-users +++ local dataKey=MONGODB_BACKUP_USER ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_BACKUP_USER}}' ++++ base64 -d +++ local 'data=backup$#%' +++ echo 'backup$#%' ++ urlencode 'backup$#%' ++ uri='backup$#%' ++ echo -n 'backup$#%' ++ jq -s -R -r @uri + user=backup%24%23%25 + check_mongo_auth backup%24%23%25:test-password@some-name-rs0-0.some-name-rs0.users-26599 + local uri=backup%24%23%25:test-password@some-name-rs0-0.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' backup%24%23%25:test-password@some-name-rs0-0.some-name-rs0.users-26599 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local uri=backup%24%23%25:test-password@some-name-rs0-0.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.Qa2csNzHII ++++ mktemp +++ local LAST_ERR=/tmp/tmp.7kIIP3aqvq +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.Qa2csNzHII +++ cat /tmp/tmp.7kIIP3aqvq +++ rm /tmp/tmp.Qa2csNzHII /tmp/tmp.7kIIP3aqvq +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ backup%24%23%25:test-password@some-name-rs0-0.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://backup%24%23%25:test-password@some-name-rs0-0.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Lm6k8aFQWP +++ mktemp ++ local LAST_ERR=/tmp/tmp.DxPN8JZ7Xu ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://backup%24%23%25:test-password@some-name-rs0-0.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Lm6k8aFQWP ++ cat /tmp/tmp.DxPN8JZ7Xu ++ rm /tmp/tmp.Lm6k8aFQWP /tmp/tmp.DxPN8JZ7Xu ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + 
'[' 1 '!=' 1 ']' + check_mongo_auth backup%24%23%25:test-password@some-name-rs0-1.some-name-rs0.users-26599 + local uri=backup%24%23%25:test-password@some-name-rs0-1.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' backup%24%23%25:test-password@some-name-rs0-1.some-name-rs0.users-26599 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=backup%24%23%25:test-password@some-name-rs0-1.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.32LxdtPhxg ++++ mktemp +++ local LAST_ERR=/tmp/tmp.1m81263V29 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.32LxdtPhxg +++ cat /tmp/tmp.1m81263V29 +++ rm /tmp/tmp.32LxdtPhxg /tmp/tmp.1m81263V29 +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ backup%24%23%25:test-password@some-name-rs0-1.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://backup%24%23%25:test-password@some-name-rs0-1.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hZiUaA65p9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.F9AFjRYDs9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://backup%24%23%25:test-password@some-name-rs0-1.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hZiUaA65p9 ++ cat /tmp/tmp.F9AFjRYDs9 ++ rm /tmp/tmp.hZiUaA65p9 /tmp/tmp.F9AFjRYDs9 ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth backup%24%23%25:test-password@some-name-rs0-2.some-name-rs0.users-26599 + local uri=backup%24%23%25:test-password@some-name-rs0-2.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' backup%24%23%25:test-password@some-name-rs0-2.some-name-rs0.users-26599 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=backup%24%23%25:test-password@some-name-rs0-2.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.5wfC8SGtxt ++++ 
mktemp +++ local LAST_ERR=/tmp/tmp.X1v2En85a5 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.5wfC8SGtxt +++ cat /tmp/tmp.X1v2En85a5 +++ rm /tmp/tmp.5wfC8SGtxt /tmp/tmp.X1v2En85a5 +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ backup%24%23%25:test-password@some-name-rs0-2.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://backup%24%23%25:test-password@some-name-rs0-2.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZhtUAG9pai +++ mktemp ++ local LAST_ERR=/tmp/tmp.ajQUuMBXpr ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://backup%24%23%25:test-password@some-name-rs0-2.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ZhtUAG9pai ++ cat /tmp/tmp.ajQUuMBXpr ++ rm /tmp/tmp.ZhtUAG9pai /tmp/tmp.ajQUuMBXpr ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + desc 'change MONGODB_BACKUP_USER' + set +o xtrace ----------------------------------------------------------------------------------- change MONGODB_BACKUP_USER ----------------------------------------------------------------------------------- ++ echo -n backup2 ++ base64 + newnameencrypted=YmFja3VwMg== + patch_secret some-users MONGODB_BACKUP_USER YmFja3VwMg== + local secret=some-users + local key=MONGODB_BACKUP_USER + local value=YmFja3VwMg== + kubectl patch secret some-users '-p={"data":{"MONGODB_BACKUP_USER": "YmFja3VwMg=="}}' secret/some-users patched + sleep 25 + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4Fh9tLanf7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.viiDPGfmz7 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.4Fh9tLanf7 ++ cat /tmp/tmp.viiDPGfmz7 ++ rm /tmp/tmp.4Fh9tLanf7 /tmp/tmp.viiDPGfmz7 ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + sleep 15 ++ getUserData some-users MONGODB_BACKUP_USER ++ local secretName=some-users ++ local dataKey=MONGODB_BACKUP_USER +++ getSecretData some-users MONGODB_BACKUP_USER +++ local secretName=some-users +++ local dataKey=MONGODB_BACKUP_USER ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_BACKUP_USER}}' ++++ base64 -d +++ local data=backup2 +++ echo backup2 ++ urlencode backup2 ++ uri=backup2 ++ echo -n backup2 ++ jq -s -R -r @uri + user=backup2 + check_mongo_auth 
backup2:test-password@some-name-rs0-0.some-name-rs0.users-26599 + local uri=backup2:test-password@some-name-rs0-0.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' backup2:test-password@some-name-rs0-0.some-name-rs0.users-26599 mongodb '' --quiet ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=backup2:test-password@some-name-rs0-0.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.vspBMB6bfY ++++ mktemp +++ local LAST_ERR=/tmp/tmp.F9Hn8N8N8R +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.vspBMB6bfY +++ cat /tmp/tmp.F9Hn8N8N8R +++ rm /tmp/tmp.vspBMB6bfY /tmp/tmp.F9Hn8N8N8R +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ backup2:test-password@some-name-rs0-0.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://backup2:test-password@some-name-rs0-0.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YZfyeUERJA +++ mktemp ++ local LAST_ERR=/tmp/tmp.neOjpaTgUl ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://backup2:test-password@some-name-rs0-0.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.YZfyeUERJA ++ cat /tmp/tmp.neOjpaTgUl ++ rm /tmp/tmp.YZfyeUERJA /tmp/tmp.neOjpaTgUl ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth backup2:test-password@some-name-rs0-1.some-name-rs0.users-26599 + local uri=backup2:test-password@some-name-rs0-1.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' backup2:test-password@some-name-rs0-1.some-name-rs0.users-26599 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=backup2:test-password@some-name-rs0-1.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.PQF0qNJeS6 egrep: warning: egrep is obsolescent; using grep -E ++++ mktemp +++ local LAST_ERR=/tmp/tmp.9bF8jfHNcr +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) 
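
Every credential rotation in this test follows the same pattern seen in the trace: base64-encode the new value, merge-patch it into the some-users Secret, wait for the operator to reconcile the cluster back to ready, and then verify auth. For users such as backup$#% whose names contain URI-reserved characters, the username is URL-encoded with jq before building the mongodb:// connection string. The two key commands below are taken from the trace; the surrounding shell is a sketch with example key names:

# Rotate a system user credential by patching the users Secret.
newpass='test-password'
kubectl patch secret some-users \
    -p="{\"data\":{\"MONGODB_BACKUP_PASSWORD\": \"$(echo -n "$newpass" | base64)\"}}"

# URL-encode the username (e.g. 'backup$#%') before placing it in a connection URI.
user=$(kubectl get secrets/some-users --template='{{.data.MONGODB_BACKUP_USER}}' \
    | base64 -d | jq -s -R -r @uri)
echo "mongodb://${user}:${newpass}@some-name-rs0-0.some-name-rs0.users-26599.svc.cluster.local/admin"
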
+++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.PQF0qNJeS6 +++ cat /tmp/tmp.9bF8jfHNcr +++ rm /tmp/tmp.PQF0qNJeS6 /tmp/tmp.9bF8jfHNcr +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ backup2:test-password@some-name-rs0-1.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://backup2:test-password@some-name-rs0-1.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3TMCmP4692 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZWFDmss4tY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://backup2:test-password@some-name-rs0-1.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3TMCmP4692 ++ cat /tmp/tmp.ZWFDmss4tY ++ rm /tmp/tmp.3TMCmP4692 /tmp/tmp.ZWFDmss4tY ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth backup2:test-password@some-name-rs0-2.some-name-rs0.users-26599 + local uri=backup2:test-password@some-name-rs0-2.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' backup2:test-password@some-name-rs0-2.some-name-rs0.users-26599 mongodb '' --quiet ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=backup2:test-password@some-name-rs0-2.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.hmvqvxqsgc ++++ mktemp +++ local LAST_ERR=/tmp/tmp.IZIzK4DXNZ +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.hmvqvxqsgc +++ cat /tmp/tmp.IZIzK4DXNZ +++ rm /tmp/tmp.hmvqvxqsgc /tmp/tmp.IZIzK4DXNZ +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ backup2:test-password@some-name-rs0-2.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://backup2:test-password@some-name-rs0-2.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LLjo11aEuO +++ mktemp ++ local LAST_ERR=/tmp/tmp.P0MiDSprBZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf 
'\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://backup2:test-password@some-name-rs0-2.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LLjo11aEuO ++ cat /tmp/tmp.P0MiDSprBZ ++ rm /tmp/tmp.LLjo11aEuO /tmp/tmp.P0MiDSprBZ ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + desc 'run backup' + set +o xtrace ----------------------------------------------------------------------------------- run backup ----------------------------------------------------------------------------------- + run_backup minio + local storage=minio + local backup_name=backup-minio + local type=logical + desc 'run backup backup-minio' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio ----------------------------------------------------------------------------------- + yq eval '.metadata.name = "backup-minio" | .spec.storageName = "minio" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/e2e-tests/users/conf/backup-minio.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.jEIShuki6D ++ mktemp + local LAST_ERR=/tmp/tmp.oSUBNUJBoC + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.jEIShuki6D perconaservermongodbbackup.psmdb.percona.com/backup-minio created + cat /tmp/tmp.oSUBNUJBoC + rm /tmp/tmp.jEIShuki6D /tmp/tmp.oSUBNUJBoC + return 0 + wait_backup backup-minio + local backup_name=backup-minio + local target_state=ready + set +o xtrace waiting for backup-minio to reach ready state.......OK + desc 'change MONGODB_USER_ADMIN_PASSWORD' + set +o xtrace ----------------------------------------------------------------------------------- change MONGODB_USER_ADMIN_PASSWORD ----------------------------------------------------------------------------------- + patch_secret some-users MONGODB_USER_ADMIN_PASSWORD dGVzdC1wYXNzd29yZA== + local secret=some-users + local key=MONGODB_USER_ADMIN_PASSWORD + local value=dGVzdC1wYXNzd29yZA== + kubectl patch secret some-users '-p={"data":{"MONGODB_USER_ADMIN_PASSWORD": "dGVzdC1wYXNzd29yZA=="}}' secret/some-users patched + sleep 25 + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RopW1TkRuo +++ mktemp ++ local LAST_ERR=/tmp/tmp.1kWeN8i9yc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.RopW1TkRuo ++ cat /tmp/tmp.1kWeN8i9yc ++ rm /tmp/tmp.RopW1TkRuo /tmp/tmp.1kWeN8i9yc ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + sleep 15 ++ getUserData some-users MONGODB_USER_ADMIN_USER ++ local secretName=some-users ++ local dataKey=MONGODB_USER_ADMIN_USER +++ getSecretData some-users MONGODB_USER_ADMIN_USER +++ local secretName=some-users +++ local dataKey=MONGODB_USER_ADMIN_USER ++++ kubectl get secrets/some-users 
'--template={{.data.MONGODB_USER_ADMIN_USER}}' ++++ base64 -d +++ local data=userAdmin +++ echo userAdmin ++ urlencode userAdmin ++ uri=userAdmin ++ echo -n userAdmin ++ jq -s -R -r @uri + user=userAdmin + check_mongo_auth userAdmin:test-password@some-name-rs0-0.some-name-rs0.users-26599 + local uri=userAdmin:test-password@some-name-rs0-0.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin:test-password@some-name-rs0-0.some-name-rs0.users-26599 mongodb '' --quiet ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin:test-password@some-name-rs0-0.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' egrep: warning: egrep is obsolescent; using grep -E ++++ mktemp +++ local LAST_OUT=/tmp/tmp.0mQBRqoESZ ++++ mktemp +++ local LAST_ERR=/tmp/tmp.SiPq6yuTbp +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.0mQBRqoESZ +++ cat /tmp/tmp.SiPq6yuTbp +++ rm /tmp/tmp.0mQBRqoESZ /tmp/tmp.SiPq6yuTbp +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ userAdmin:test-password@some-name-rs0-0.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:test-password@some-name-rs0-0.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gAcvhtsCvl +++ mktemp ++ local LAST_ERR=/tmp/tmp.bERztfMKoK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:test-password@some-name-rs0-0.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.gAcvhtsCvl ++ cat /tmp/tmp.bERztfMKoK ++ rm /tmp/tmp.gAcvhtsCvl /tmp/tmp.bERztfMKoK ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth userAdmin:test-password@some-name-rs0-1.some-name-rs0.users-26599 + local uri=userAdmin:test-password@some-name-rs0-1.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin:test-password@some-name-rs0-1.some-name-rs0.users-26599 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin:test-password@some-name-rs0-1.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 
'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.7ILwGwbMn3 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.329aXxA8oi +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.7ILwGwbMn3 +++ cat /tmp/tmp.329aXxA8oi +++ rm /tmp/tmp.7ILwGwbMn3 /tmp/tmp.329aXxA8oi +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ userAdmin:test-password@some-name-rs0-1.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:test-password@some-name-rs0-1.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RGIlTIWQQ7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.0NQcR1Ug5I ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:test-password@some-name-rs0-1.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.RGIlTIWQQ7 ++ cat /tmp/tmp.0NQcR1Ug5I ++ rm /tmp/tmp.RGIlTIWQQ7 /tmp/tmp.0NQcR1Ug5I ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth userAdmin:test-password@some-name-rs0-2.some-name-rs0.users-26599 + local uri=userAdmin:test-password@some-name-rs0-2.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin:test-password@some-name-rs0-2.some-name-rs0.users-26599 mongodb '' --quiet ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin:test-password@some-name-rs0-2.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.hARJokIQUk ++++ mktemp +++ local LAST_ERR=/tmp/tmp.3jvHXM6Wtb +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.hARJokIQUk +++ cat /tmp/tmp.3jvHXM6Wtb +++ rm /tmp/tmp.hARJokIQUk /tmp/tmp.3jvHXM6Wtb +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ userAdmin:test-password@some-name-rs0-2.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo 
mongodb://userAdmin:test-password@some-name-rs0-2.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xbbUQXCRui +++ mktemp ++ local LAST_ERR=/tmp/tmp.XlLgIv4LkE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:test-password@some-name-rs0-2.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xbbUQXCRui ++ cat /tmp/tmp.XlLgIv4LkE ++ rm /tmp/tmp.xbbUQXCRui /tmp/tmp.XlLgIv4LkE ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + desc 'change MONGODB_USER_ADMIN_USER' + set +o xtrace ----------------------------------------------------------------------------------- change MONGODB_USER_ADMIN_USER ----------------------------------------------------------------------------------- ++ echo -n userAdmin2 ++ base64 + newnameencrypted=dXNlckFkbWluMg== + patch_secret some-users MONGODB_USER_ADMIN_USER dXNlckFkbWluMg== + local secret=some-users + local key=MONGODB_USER_ADMIN_USER + local value=dXNlckFkbWluMg== + kubectl patch secret some-users '-p={"data":{"MONGODB_USER_ADMIN_USER": "dXNlckFkbWluMg=="}}' secret/some-users patched + sleep 25 + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QkM8rMmhgC +++ mktemp ++ local LAST_ERR=/tmp/tmp.vkB7CEFdnK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.QkM8rMmhgC ++ cat /tmp/tmp.vkB7CEFdnK ++ rm /tmp/tmp.QkM8rMmhgC /tmp/tmp.vkB7CEFdnK ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + sleep 15 ++ getUserData some-users MONGODB_USER_ADMIN_USER ++ local secretName=some-users ++ local dataKey=MONGODB_USER_ADMIN_USER +++ getSecretData some-users MONGODB_USER_ADMIN_USER +++ local secretName=some-users +++ local dataKey=MONGODB_USER_ADMIN_USER ++++ base64 -d ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_USER_ADMIN_USER}}' +++ local data=userAdmin2 +++ echo userAdmin2 ++ urlencode userAdmin2 ++ uri=userAdmin2 ++ echo -n userAdmin2 ++ jq -s -R -r @uri + user=userAdmin2 + check_mongo_auth userAdmin2:test-password@some-name-rs0-0.some-name-rs0.users-26599 + local uri=userAdmin2:test-password@some-name-rs0-0.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin2:test-password@some-name-rs0-0.some-name-rs0.users-26599 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local uri=userAdmin2:test-password@some-name-rs0-0.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods 
--selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.wauDwTPmFa ++++ mktemp +++ local LAST_ERR=/tmp/tmp.PJ5XVMBxaN +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.wauDwTPmFa +++ cat /tmp/tmp.PJ5XVMBxaN +++ rm /tmp/tmp.wauDwTPmFa /tmp/tmp.PJ5XVMBxaN +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ userAdmin2:test-password@some-name-rs0-0.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin2:test-password@some-name-rs0-0.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Z4xhw30Dkr +++ mktemp ++ local LAST_ERR=/tmp/tmp.zKmqrga2Kb ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin2:test-password@some-name-rs0-0.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Z4xhw30Dkr ++ cat /tmp/tmp.zKmqrga2Kb ++ rm /tmp/tmp.Z4xhw30Dkr /tmp/tmp.zKmqrga2Kb ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth userAdmin2:test-password@some-name-rs0-1.some-name-rs0.users-26599 + local uri=userAdmin2:test-password@some-name-rs0-1.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin2:test-password@some-name-rs0-1.some-name-rs0.users-26599 mongodb '' --quiet ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin2:test-password@some-name-rs0-1.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.NQQHKPK1pa ++++ mktemp +++ local LAST_ERR=/tmp/tmp.KNPkztdVTK +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.NQQHKPK1pa +++ cat /tmp/tmp.KNPkztdVTK +++ rm /tmp/tmp.NQQHKPK1pa /tmp/tmp.KNPkztdVTK +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ userAdmin2:test-password@some-name-rs0-1.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo 
mongodb://userAdmin2:test-password@some-name-rs0-1.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eqW2HvzzIy +++ mktemp ++ local LAST_ERR=/tmp/tmp.EJ4iW68ESi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin2:test-password@some-name-rs0-1.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.eqW2HvzzIy ++ cat /tmp/tmp.EJ4iW68ESi ++ rm /tmp/tmp.eqW2HvzzIy /tmp/tmp.EJ4iW68ESi ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth userAdmin2:test-password@some-name-rs0-2.some-name-rs0.users-26599 + local uri=userAdmin2:test-password@some-name-rs0-2.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin2:test-password@some-name-rs0-2.some-name-rs0.users-26599 mongodb '' --quiet ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin2:test-password@some-name-rs0-2.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.JyktM9LZe7 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.tvjYGQTf4D +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.JyktM9LZe7 +++ cat /tmp/tmp.tvjYGQTf4D +++ rm /tmp/tmp.JyktM9LZe7 /tmp/tmp.tvjYGQTf4D +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ userAdmin2:test-password@some-name-rs0-2.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin2:test-password@some-name-rs0-2.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yY2w8TKlvj +++ mktemp ++ local LAST_ERR=/tmp/tmp.P9g0PdZdjR ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin2:test-password@some-name-rs0-2.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.yY2w8TKlvj ++ cat /tmp/tmp.P9g0PdZdjR ++ rm /tmp/tmp.yY2w8TKlvj /tmp/tmp.P9g0PdZdjR ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return 
----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + desc 'change MONGODB_CLUSTER_ADMIN_PASSWORD' + set +o xtrace ----------------------------------------------------------------------------------- change MONGODB_CLUSTER_ADMIN_PASSWORD ----------------------------------------------------------------------------------- + patch_secret some-users MONGODB_CLUSTER_ADMIN_PASSWORD dGVzdC1wYXNzd29yZA== + local secret=some-users + local key=MONGODB_CLUSTER_ADMIN_PASSWORD + local value=dGVzdC1wYXNzd29yZA== + kubectl patch secret some-users '-p={"data":{"MONGODB_CLUSTER_ADMIN_PASSWORD": "dGVzdC1wYXNzd29yZA=="}}' secret/some-users patched + sleep 25 + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ngiHccr5ay +++ mktemp ++ local LAST_ERR=/tmp/tmp.jBz0U50Ze5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ngiHccr5ay ++ cat /tmp/tmp.jBz0U50Ze5 ++ rm /tmp/tmp.ngiHccr5ay /tmp/tmp.jBz0U50Ze5 ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + sleep 15 ++ getUserData some-users MONGODB_CLUSTER_ADMIN_USER ++ local secretName=some-users ++ local dataKey=MONGODB_CLUSTER_ADMIN_USER +++ getSecretData some-users MONGODB_CLUSTER_ADMIN_USER +++ local secretName=some-users +++ local dataKey=MONGODB_CLUSTER_ADMIN_USER ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_CLUSTER_ADMIN_USER}}' ++++ base64 -d +++ local data=clusterAdmin +++ echo clusterAdmin ++ urlencode clusterAdmin ++ uri=clusterAdmin ++ echo -n clusterAdmin ++ jq -s -R -r @uri + user=clusterAdmin + check_mongo_auth clusterAdmin:test-password@some-name-rs0-0.some-name-rs0.users-26599 + local uri=clusterAdmin:test-password@some-name-rs0-0.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' clusterAdmin:test-password@some-name-rs0-0.some-name-rs0.users-26599 mongodb '' --quiet ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=clusterAdmin:test-password@some-name-rs0-0.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.kDQ7QGRXUU ++++ mktemp +++ local LAST_ERR=/tmp/tmp.67Vr1IHCFt +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.kDQ7QGRXUU +++ cat /tmp/tmp.67Vr1IHCFt +++ rm /tmp/tmp.kDQ7QGRXUU /tmp/tmp.67Vr1IHCFt +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ clusterAdmin:test-password@some-name-rs0-0.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo 
mongodb://clusterAdmin:test-password@some-name-rs0-0.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7rgnW3oLC9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.iSyXIeYb05 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://clusterAdmin:test-password@some-name-rs0-0.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.7rgnW3oLC9 ++ cat /tmp/tmp.iSyXIeYb05 ++ rm /tmp/tmp.7rgnW3oLC9 /tmp/tmp.iSyXIeYb05 ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth clusterAdmin:test-password@some-name-rs0-1.some-name-rs0.users-26599 + local uri=clusterAdmin:test-password@some-name-rs0-1.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' clusterAdmin:test-password@some-name-rs0-1.some-name-rs0.users-26599 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=clusterAdmin:test-password@some-name-rs0-1.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.pU20za5b7v ++++ mktemp +++ local LAST_ERR=/tmp/tmp.3sTi0p6Qyr +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.pU20za5b7v +++ cat /tmp/tmp.3sTi0p6Qyr +++ rm /tmp/tmp.pU20za5b7v /tmp/tmp.3sTi0p6Qyr +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ clusterAdmin:test-password@some-name-rs0-1.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://clusterAdmin:test-password@some-name-rs0-1.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NeTkn4dH2n +++ mktemp ++ local LAST_ERR=/tmp/tmp.IPQBuSfq0f ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://clusterAdmin:test-password@some-name-rs0-1.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.NeTkn4dH2n ++ cat /tmp/tmp.IPQBuSfq0f ++ rm /tmp/tmp.NeTkn4dH2n /tmp/tmp.IPQBuSfq0f ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return 
----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth clusterAdmin:test-password@some-name-rs0-2.some-name-rs0.users-26599 + local uri=clusterAdmin:test-password@some-name-rs0-2.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' clusterAdmin:test-password@some-name-rs0-2.some-name-rs0.users-26599 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local uri=clusterAdmin:test-password@some-name-rs0-2.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.no2IllbUu3 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.L1BDakscHM +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.no2IllbUu3 +++ cat /tmp/tmp.L1BDakscHM +++ rm /tmp/tmp.no2IllbUu3 /tmp/tmp.L1BDakscHM +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ clusterAdmin:test-password@some-name-rs0-2.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://clusterAdmin:test-password@some-name-rs0-2.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SoPTKg9vrM +++ mktemp ++ local LAST_ERR=/tmp/tmp.exrpcH8SKa ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://clusterAdmin:test-password@some-name-rs0-2.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.SoPTKg9vrM ++ cat /tmp/tmp.exrpcH8SKa ++ rm /tmp/tmp.SoPTKg9vrM /tmp/tmp.exrpcH8SKa ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + desc 'change MONGODB_CLUSTER_MONITOR_PASSWORD' + set +o xtrace ----------------------------------------------------------------------------------- change MONGODB_CLUSTER_MONITOR_PASSWORD ----------------------------------------------------------------------------------- + patch_secret some-users MONGODB_CLUSTER_MONITOR_PASSWORD dGVzdC1wYXNzd29yZA== + local secret=some-users + local key=MONGODB_CLUSTER_MONITOR_PASSWORD + local value=dGVzdC1wYXNzd29yZA== + kubectl patch secret some-users '-p={"data":{"MONGODB_CLUSTER_MONITOR_PASSWORD": "dGVzdC1wYXNzd29yZA=="}}' secret/some-users patched + sleep 25 + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 
'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.svFUaQrKLB +++ mktemp ++ local LAST_ERR=/tmp/tmp.3fOsmRd4z3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.svFUaQrKLB ++ cat /tmp/tmp.3fOsmRd4z3 ++ rm /tmp/tmp.svFUaQrKLB /tmp/tmp.3fOsmRd4z3 ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + sleep 15 ++ getUserData some-users MONGODB_CLUSTER_MONITOR_USER ++ local secretName=some-users ++ local dataKey=MONGODB_CLUSTER_MONITOR_USER +++ getSecretData some-users MONGODB_CLUSTER_MONITOR_USER +++ local secretName=some-users +++ local dataKey=MONGODB_CLUSTER_MONITOR_USER ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_CLUSTER_MONITOR_USER}}' ++++ base64 -d +++ local data=clusterMonitor +++ echo clusterMonitor ++ urlencode clusterMonitor ++ uri=clusterMonitor ++ echo -n clusterMonitor ++ jq -s -R -r @uri + user=clusterMonitor + check_mongo_auth clusterMonitor:test-password@some-name-rs0-0.some-name-rs0.users-26599 + local uri=clusterMonitor:test-password@some-name-rs0-0.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' clusterMonitor:test-password@some-name-rs0-0.some-name-rs0.users-26599 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local uri=clusterMonitor:test-password@some-name-rs0-0.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.TC6BGZC7ec ++++ mktemp +++ local LAST_ERR=/tmp/tmp.q2ytS8JE00 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.TC6BGZC7ec +++ cat /tmp/tmp.q2ytS8JE00 +++ rm /tmp/tmp.TC6BGZC7ec /tmp/tmp.q2ytS8JE00 +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ clusterMonitor:test-password@some-name-rs0-0.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://clusterMonitor:test-password@some-name-rs0-0.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dYxAsOnxNA +++ mktemp ++ local LAST_ERR=/tmp/tmp.gLnM8tmazI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://clusterMonitor:test-password@some-name-rs0-0.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dYxAsOnxNA ++ cat /tmp/tmp.gLnM8tmazI ++ rm /tmp/tmp.dYxAsOnxNA /tmp/tmp.gLnM8tmazI ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- 
ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth clusterMonitor:test-password@some-name-rs0-1.some-name-rs0.users-26599 + local uri=clusterMonitor:test-password@some-name-rs0-1.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' clusterMonitor:test-password@some-name-rs0-1.some-name-rs0.users-26599 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=clusterMonitor:test-password@some-name-rs0-1.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.Z1k1mfnGXv ++++ mktemp +++ local LAST_ERR=/tmp/tmp.TNJzeWImhO +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.Z1k1mfnGXv +++ cat /tmp/tmp.TNJzeWImhO +++ rm /tmp/tmp.Z1k1mfnGXv /tmp/tmp.TNJzeWImhO +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ clusterMonitor:test-password@some-name-rs0-1.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://clusterMonitor:test-password@some-name-rs0-1.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BiRQtSZAuh +++ mktemp ++ local LAST_ERR=/tmp/tmp.DLDHzvGI1U ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://clusterMonitor:test-password@some-name-rs0-1.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.BiRQtSZAuh ++ cat /tmp/tmp.DLDHzvGI1U ++ rm /tmp/tmp.BiRQtSZAuh /tmp/tmp.DLDHzvGI1U ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth clusterMonitor:test-password@some-name-rs0-2.some-name-rs0.users-26599 + local uri=clusterMonitor:test-password@some-name-rs0-2.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' clusterMonitor:test-password@some-name-rs0-2.some-name-rs0.users-26599 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=clusterMonitor:test-password@some-name-rs0-2.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: 
warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.GCwYrCT7w2 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.qM1fEDknm2 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.GCwYrCT7w2 +++ cat /tmp/tmp.qM1fEDknm2 +++ rm /tmp/tmp.GCwYrCT7w2 /tmp/tmp.qM1fEDknm2 +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ clusterMonitor:test-password@some-name-rs0-2.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://clusterMonitor:test-password@some-name-rs0-2.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w1cBbL9evt +++ mktemp ++ local LAST_ERR=/tmp/tmp.ATauFKkR8d ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://clusterMonitor:test-password@some-name-rs0-2.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.w1cBbL9evt ++ cat /tmp/tmp.ATauFKkR8d ++ rm /tmp/tmp.w1cBbL9evt /tmp/tmp.ATauFKkR8d ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + desc 'remove users secret' + set +o xtrace ----------------------------------------------------------------------------------- remove users secret ----------------------------------------------------------------------------------- + kubectl_bin delete secret some-users ++ mktemp + local LAST_OUT=/tmp/tmp.DSwnNM5y4J ++ mktemp + local LAST_ERR=/tmp/tmp.LerPqeSo7k + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete secret some-users + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DSwnNM5y4J secret "some-users" deleted from users-26599 namespace + cat /tmp/tmp.LerPqeSo7k + rm /tmp/tmp.DSwnNM5y4J /tmp/tmp.LerPqeSo7k + return 0 + sleep 35 + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nCyCjumSdy +++ mktemp ++ local LAST_ERR=/tmp/tmp.kcIwnQtrZa ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.nCyCjumSdy ++ cat /tmp/tmp.kcIwnQtrZa ++ rm /tmp/tmp.nCyCjumSdy /tmp/tmp.kcIwnQtrZa ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + sleep 15 ++ getUserData some-users MONGODB_USER_ADMIN_USER ++ local secretName=some-users ++ local dataKey=MONGODB_USER_ADMIN_USER +++ getSecretData some-users MONGODB_USER_ADMIN_USER +++ local secretName=some-users +++ local dataKey=MONGODB_USER_ADMIN_USER ++++ kubectl get secrets/some-users 
'--template={{.data.MONGODB_USER_ADMIN_USER}}' ++++ base64 -d +++ local data=userAdmin +++ echo userAdmin ++ urlencode userAdmin ++ uri=userAdmin ++ echo -n userAdmin ++ jq -s -R -r @uri + user=userAdmin ++ getUserData some-users MONGODB_USER_ADMIN_PASSWORD ++ local secretName=some-users ++ local dataKey=MONGODB_USER_ADMIN_PASSWORD +++ getSecretData some-users MONGODB_USER_ADMIN_PASSWORD +++ local secretName=some-users +++ local dataKey=MONGODB_USER_ADMIN_PASSWORD ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_USER_ADMIN_PASSWORD}}' ++++ base64 -d +++ local data=ZaylJYBz6JS4rl7Ir +++ echo ZaylJYBz6JS4rl7Ir ++ urlencode ZaylJYBz6JS4rl7Ir ++ uri=ZaylJYBz6JS4rl7Ir ++ echo -n ZaylJYBz6JS4rl7Ir ++ jq -s -R -r @uri + pass=ZaylJYBz6JS4rl7Ir + check_mongo_auth userAdmin:ZaylJYBz6JS4rl7Ir@some-name-rs0-0.some-name-rs0.users-26599 + local uri=userAdmin:ZaylJYBz6JS4rl7Ir@some-name-rs0-0.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin:ZaylJYBz6JS4rl7Ir@some-name-rs0-0.some-name-rs0.users-26599 mongodb '' --quiet ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin:ZaylJYBz6JS4rl7Ir@some-name-rs0-0.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.vEH56ZHLwI ++++ mktemp +++ local LAST_ERR=/tmp/tmp.KoajU8z4eR +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.vEH56ZHLwI +++ cat /tmp/tmp.KoajU8z4eR +++ rm /tmp/tmp.vEH56ZHLwI /tmp/tmp.KoajU8z4eR +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ userAdmin:ZaylJYBz6JS4rl7Ir@some-name-rs0-0.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:ZaylJYBz6JS4rl7Ir@some-name-rs0-0.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uN0Z34KCkB +++ mktemp ++ local LAST_ERR=/tmp/tmp.5ymTyG6PTU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:ZaylJYBz6JS4rl7Ir@some-name-rs0-0.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.uN0Z34KCkB ++ cat /tmp/tmp.5ymTyG6PTU ++ rm /tmp/tmp.uN0Z34KCkB /tmp/tmp.5ymTyG6PTU ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth userAdmin:ZaylJYBz6JS4rl7Ir@some-name-rs0-1.some-name-rs0.users-26599 + local uri=userAdmin:ZaylJYBz6JS4rl7Ir@some-name-rs0-1.some-name-rs0.users-26599 ++ 
run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin:ZaylJYBz6JS4rl7Ir@some-name-rs0-1.some-name-rs0.users-26599 mongodb '' --quiet ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin:ZaylJYBz6JS4rl7Ir@some-name-rs0-1.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.e2h4qMJMhS ++++ mktemp +++ local LAST_ERR=/tmp/tmp.Atv4CqgLd5 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.e2h4qMJMhS +++ cat /tmp/tmp.Atv4CqgLd5 +++ rm /tmp/tmp.e2h4qMJMhS /tmp/tmp.Atv4CqgLd5 +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ userAdmin:ZaylJYBz6JS4rl7Ir@some-name-rs0-1.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:ZaylJYBz6JS4rl7Ir@some-name-rs0-1.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.snJH6FXqzY +++ mktemp ++ local LAST_ERR=/tmp/tmp.DoJm5g7H95 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:ZaylJYBz6JS4rl7Ir@some-name-rs0-1.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.snJH6FXqzY ++ cat /tmp/tmp.DoJm5g7H95 ++ rm /tmp/tmp.snJH6FXqzY /tmp/tmp.DoJm5g7H95 ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth userAdmin:ZaylJYBz6JS4rl7Ir@some-name-rs0-2.some-name-rs0.users-26599 + local uri=userAdmin:ZaylJYBz6JS4rl7Ir@some-name-rs0-2.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin:ZaylJYBz6JS4rl7Ir@some-name-rs0-2.some-name-rs0.users-26599 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin:ZaylJYBz6JS4rl7Ir@some-name-rs0-2.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.OMvgQsHCa6 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.HUiEdDwwlE +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 
'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.OMvgQsHCa6 +++ cat /tmp/tmp.HUiEdDwwlE +++ rm /tmp/tmp.OMvgQsHCa6 /tmp/tmp.HUiEdDwwlE +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ userAdmin:ZaylJYBz6JS4rl7Ir@some-name-rs0-2.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:ZaylJYBz6JS4rl7Ir@some-name-rs0-2.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QGqyru5cDt +++ mktemp ++ local LAST_ERR=/tmp/tmp.JWGVNJZjop ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:ZaylJYBz6JS4rl7Ir@some-name-rs0-2.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.QGqyru5cDt ++ cat /tmp/tmp.JWGVNJZjop ++ rm /tmp/tmp.QGqyru5cDt /tmp/tmp.JWGVNJZjop ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + desc 'update all users' + set +o xtrace ----------------------------------------------------------------------------------- update all users ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.4PiIw9zjsw ++ mktemp + local LAST_ERR=/tmp/tmp.kKFszG5bAz + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.4PiIw9zjsw secret/some-users configured + cat /tmp/tmp.kKFszG5bAz Warning: resource secrets/some-users is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. + rm /tmp/tmp.4PiIw9zjsw /tmp/tmp.kKFszG5bAz + return 0 + sleep 35 + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WSJafLFwza +++ mktemp ++ local LAST_ERR=/tmp/tmp.SxpCxVks9V ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.WSJafLFwza ++ cat /tmp/tmp.SxpCxVks9V ++ rm /tmp/tmp.WSJafLFwza /tmp/tmp.SxpCxVks9V ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . 
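The first status poll after re-applying secrets.yml comes back as "error" (presumably while the operator is still reconciling the new credentials), so the helper keeps polling until the resource reports "ready". Roughly, the loop being traced here looks like this (an assumed reconstruction of wait_cluster_consistency; the real helper wraps kubectl in the same retry and temp-file plumbing seen throughout the log):

    # Poll .status.state of the psmdb resource until it reports "ready".
    wait_cluster_consistency() {
        local cluster_name="$1"
        local wait_time="${2:-32}"
        local retry=0

        sleep 7
        echo -n 'waiting for cluster readiness'
        until [[ "$(kubectl get psmdb "$cluster_name" -o 'jsonpath={.status.state}')" == "ready" ]]; do
            retry=$((retry + 1))
            if [ "$retry" -ge "$wait_time" ]; then
                echo " cluster $cluster_name did not reach ready state in time"
                return 1
            fi
            echo -n .
            sleep 10
        done
        echo .OK
    }
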
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sbl5ooLg5j +++ mktemp ++ local LAST_ERR=/tmp/tmp.JcpgJUOKTz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.sbl5ooLg5j ++ cat /tmp/tmp.JcpgJUOKTz ++ rm /tmp/tmp.sbl5ooLg5j /tmp/tmp.JcpgJUOKTz ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + sleep 15 ++ getUserData some-users MONGODB_USER_ADMIN_USER ++ local secretName=some-users ++ local dataKey=MONGODB_USER_ADMIN_USER +++ getSecretData some-users MONGODB_USER_ADMIN_USER +++ local secretName=some-users +++ local dataKey=MONGODB_USER_ADMIN_USER ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_USER_ADMIN_USER}}' ++++ base64 -d +++ local data=userAdmin +++ echo userAdmin ++ urlencode userAdmin ++ uri=userAdmin ++ echo -n userAdmin ++ jq -s -R -r @uri + user=userAdmin ++ getUserData some-users MONGODB_USER_ADMIN_PASSWORD ++ local secretName=some-users ++ local dataKey=MONGODB_USER_ADMIN_PASSWORD +++ getSecretData some-users MONGODB_USER_ADMIN_PASSWORD +++ local secretName=some-users +++ local dataKey=MONGODB_USER_ADMIN_PASSWORD ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_USER_ADMIN_PASSWORD}}' ++++ base64 -d +++ local data=userAdmin123456 +++ echo userAdmin123456 ++ urlencode userAdmin123456 ++ uri=userAdmin123456 ++ echo -n userAdmin123456 ++ jq -s -R -r @uri + pass=userAdmin123456 + check_mongo_auth userAdmin:userAdmin123456@some-name-rs0-0.some-name-rs0.users-26599 + local uri=userAdmin:userAdmin123456@some-name-rs0-0.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin:userAdmin123456@some-name-rs0-0.some-name-rs0.users-26599 mongodb '' --quiet ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin:userAdmin123456@some-name-rs0-0.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.QNvcN1QDOs egrep: warning: egrep is obsolescent; using grep -E ++++ mktemp +++ local LAST_ERR=/tmp/tmp.MsyfL46sgT +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.QNvcN1QDOs +++ cat /tmp/tmp.MsyfL46sgT +++ rm /tmp/tmp.QNvcN1QDOs /tmp/tmp.MsyfL46sgT +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ userAdmin:userAdmin123456@some-name-rs0-0.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-rs0-0.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0GvUiRGcsS +++ mktemp ++ local LAST_ERR=/tmp/tmp.vCVaPwamsw ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec 
psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-rs0-0.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.0GvUiRGcsS ++ cat /tmp/tmp.vCVaPwamsw ++ rm /tmp/tmp.0GvUiRGcsS /tmp/tmp.vCVaPwamsw ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth userAdmin:userAdmin123456@some-name-rs0-1.some-name-rs0.users-26599 + local uri=userAdmin:userAdmin123456@some-name-rs0-1.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin:userAdmin123456@some-name-rs0-1.some-name-rs0.users-26599 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin:userAdmin123456@some-name-rs0-1.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.fsal5hXkgB ++++ mktemp +++ local LAST_ERR=/tmp/tmp.A3olBUz15k +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.fsal5hXkgB +++ cat /tmp/tmp.A3olBUz15k +++ rm /tmp/tmp.fsal5hXkgB /tmp/tmp.A3olBUz15k +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ userAdmin:userAdmin123456@some-name-rs0-1.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-rs0-1.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VCBG1V92sL +++ mktemp ++ local LAST_ERR=/tmp/tmp.rRCvWYQaAm ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-rs0-1.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.VCBG1V92sL ++ cat /tmp/tmp.rRCvWYQaAm ++ rm /tmp/tmp.VCBG1V92sL /tmp/tmp.rRCvWYQaAm ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth userAdmin:userAdmin123456@some-name-rs0-2.some-name-rs0.users-26599 + local uri=userAdmin:userAdmin123456@some-name-rs0-2.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin:userAdmin123456@some-name-rs0-2.some-name-rs0.users-26599 mongodb '' --quiet 
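
Each of the ping checks above resolves the psmdb-client pod by label, then pipes a one-line command into the mongo shell inside that pod, with the credentials under test embedded in the connection URI. A minimal sketch of check_mongo_auth and run_mongo as they expand in this trace (helper and variable names are taken from the trace; the log-noise filter is shortened and the cfg/config-server branch is omitted):

# Reconstruction of the auth check expanded above; not the exact e2e-tests source.
run_mongo() {
    local command=$1 uri=$2 suffix=.svc.cluster.local
    local client_container
    client_container=$(kubectl get pods --selector=name=psmdb-client \
        -o 'jsonpath={.items[].metadata.name}')
    kubectl exec "$client_container" -- bash -c \
        "printf '$command\n' | mongo mongodb://$uri$suffix/admin?ssl=false\&replicaSet=rs0 --quiet"
}

check_mongo_auth() {
    # Authentication works if the ping command returns 1 through the supplied URI.
    local uri=$1 ping
    ping=$(run_mongo 'db.runCommand({ ping: 1 }).ok' "$uri" \
        | grep -Ev 'I NETWORK|W NETWORK|Implicit session:|versions do not match')
    [ "$ping" = 1 ]
}

The test calls this once per replica-set member (rs0-0, rs0-1, rs0-2) so that a credential change is verified on every mongod, not just the primary.
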
++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin:userAdmin123456@some-name-rs0-2.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.1ABPvmV7ma egrep: warning: egrep is obsolescent; using grep -E ++++ mktemp +++ local LAST_ERR=/tmp/tmp.RJWlG3jtoY +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.1ABPvmV7ma +++ cat /tmp/tmp.RJWlG3jtoY +++ rm /tmp/tmp.1ABPvmV7ma /tmp/tmp.RJWlG3jtoY +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ userAdmin:userAdmin123456@some-name-rs0-2.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-rs0-2.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.r8C6pwFcBx +++ mktemp ++ local LAST_ERR=/tmp/tmp.WWVgdveXEc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-rs0-2.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.r8C6pwFcBx ++ cat /tmp/tmp.WWVgdveXEc ++ rm /tmp/tmp.r8C6pwFcBx /tmp/tmp.WWVgdveXEc ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + newmonitorusername=newmonitorusername + desc 'update monitor user username' + set +o xtrace ----------------------------------------------------------------------------------- update monitor user username ----------------------------------------------------------------------------------- ++ echo -n newmonitorusername ++ base64 + patch_secret some-users MONGODB_CLUSTER_MONITOR_USER bmV3bW9uaXRvcnVzZXJuYW1l + local secret=some-users + local key=MONGODB_CLUSTER_MONITOR_USER + local value=bmV3bW9uaXRvcnVzZXJuYW1l + kubectl patch secret some-users '-p={"data":{"MONGODB_CLUSTER_MONITOR_USER": "bmV3bW9uaXRvcnVzZXJuYW1l"}}' secret/some-users patched + sleep 35 + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xPedHh3mkl +++ mktemp ++ local LAST_ERR=/tmp/tmp.kFOiZbNYnp ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xPedHh3mkl ++ cat 
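
The monitor-username change above is nothing more than a Secret patch: the new value is base64-encoded and written into the MONGODB_CLUSTER_MONITOR_USER data key, after which the operator reconciles the cluster. A sketch of patch_secret as it expands in the trace, followed by the call made in this step:

# patch_secret as reconstructed from the trace: overwrite a single data key of a Secret.
patch_secret() {
    local secret=$1
    local key=$2
    local value=$3   # base64-encoded by the caller
    kubectl patch secret "$secret" -p="{\"data\":{\"$key\": \"$value\"}}"
}

# The call made here; the encoded value comes from: echo -n newmonitorusername | base64
patch_secret some-users MONGODB_CLUSTER_MONITOR_USER "$(echo -n newmonitorusername | base64)"
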
/tmp/tmp.kFOiZbNYnp ++ rm /tmp/tmp.xPedHh3mkl /tmp/tmp.kFOiZbNYnp ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + sleep 15 ++ getUserData some-users MONGODB_CLUSTER_MONITOR_USER ++ local secretName=some-users ++ local dataKey=MONGODB_CLUSTER_MONITOR_USER +++ getSecretData some-users MONGODB_CLUSTER_MONITOR_USER +++ local secretName=some-users +++ local dataKey=MONGODB_CLUSTER_MONITOR_USER ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_CLUSTER_MONITOR_USER}}' ++++ base64 -d +++ local data=newmonitorusername +++ echo newmonitorusername ++ urlencode newmonitorusername ++ uri=newmonitorusername ++ echo -n newmonitorusername ++ jq -s -R -r @uri + user=newmonitorusername ++ getUserData some-users MONGODB_CLUSTER_MONITOR_PASSWORD ++ local secretName=some-users ++ local dataKey=MONGODB_CLUSTER_MONITOR_PASSWORD +++ getSecretData some-users MONGODB_CLUSTER_MONITOR_PASSWORD +++ local secretName=some-users +++ local dataKey=MONGODB_CLUSTER_MONITOR_PASSWORD ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_CLUSTER_MONITOR_PASSWORD}}' ++++ base64 -d +++ local data=clusterMonitor123456 +++ echo clusterMonitor123456 ++ urlencode clusterMonitor123456 ++ uri=clusterMonitor123456 ++ echo -n clusterMonitor123456 ++ jq -s -R -r @uri + pass=clusterMonitor123456 + check_mongo_auth newmonitorusername:clusterMonitor123456@some-name-rs0-0.some-name-rs0.users-26599 + local uri=newmonitorusername:clusterMonitor123456@some-name-rs0-0.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' newmonitorusername:clusterMonitor123456@some-name-rs0-0.some-name-rs0.users-26599 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=newmonitorusername:clusterMonitor123456@some-name-rs0-0.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.3qK0zMrffj ++++ mktemp +++ local LAST_ERR=/tmp/tmp.Hw5bc5aPRy +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.3qK0zMrffj +++ cat /tmp/tmp.Hw5bc5aPRy +++ rm /tmp/tmp.3qK0zMrffj /tmp/tmp.Hw5bc5aPRy +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ newmonitorusername:clusterMonitor123456@some-name-rs0-0.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://newmonitorusername:clusterMonitor123456@some-name-rs0-0.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7X7L3jotHv +++ mktemp ++ local LAST_ERR=/tmp/tmp.JjhoyEIky4 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo 
mongodb://newmonitorusername:clusterMonitor123456@some-name-rs0-0.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.7X7L3jotHv ++ cat /tmp/tmp.JjhoyEIky4 ++ rm /tmp/tmp.7X7L3jotHv /tmp/tmp.JjhoyEIky4 ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth newmonitorusername:clusterMonitor123456@some-name-rs0-1.some-name-rs0.users-26599 + local uri=newmonitorusername:clusterMonitor123456@some-name-rs0-1.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' newmonitorusername:clusterMonitor123456@some-name-rs0-1.some-name-rs0.users-26599 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=newmonitorusername:clusterMonitor123456@some-name-rs0-1.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.LB5G2Rnf86 egrep: warning: egrep is obsolescent; using grep -E ++++ mktemp +++ local LAST_ERR=/tmp/tmp.1pc6q8R5vc +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.LB5G2Rnf86 +++ cat /tmp/tmp.1pc6q8R5vc +++ rm /tmp/tmp.LB5G2Rnf86 /tmp/tmp.1pc6q8R5vc +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ newmonitorusername:clusterMonitor123456@some-name-rs0-1.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://newmonitorusername:clusterMonitor123456@some-name-rs0-1.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QouVa4kYZa +++ mktemp ++ local LAST_ERR=/tmp/tmp.4JSqfKmqnI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://newmonitorusername:clusterMonitor123456@some-name-rs0-1.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.QouVa4kYZa ++ cat /tmp/tmp.4JSqfKmqnI ++ rm /tmp/tmp.QouVa4kYZa /tmp/tmp.4JSqfKmqnI ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth newmonitorusername:clusterMonitor123456@some-name-rs0-2.some-name-rs0.users-26599 + local uri=newmonitorusername:clusterMonitor123456@some-name-rs0-2.some-name-rs0.users-26599 ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for 
MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ run_mongo 'db.runCommand({ ping: 1 }).ok' newmonitorusername:clusterMonitor123456@some-name-rs0-2.some-name-rs0.users-26599 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=newmonitorusername:clusterMonitor123456@some-name-rs0-2.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.EsWhwy9G7Q ++++ mktemp +++ local LAST_ERR=/tmp/tmp.uwUv5NEn6E +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.EsWhwy9G7Q +++ cat /tmp/tmp.uwUv5NEn6E +++ rm /tmp/tmp.EsWhwy9G7Q /tmp/tmp.uwUv5NEn6E +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ newmonitorusername:clusterMonitor123456@some-name-rs0-2.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://newmonitorusername:clusterMonitor123456@some-name-rs0-2.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bboGgOuo8r +++ mktemp ++ local LAST_ERR=/tmp/tmp.y9pvIYtGeE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://newmonitorusername:clusterMonitor123456@some-name-rs0-2.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bboGgOuo8r ++ cat /tmp/tmp.y9pvIYtGeE ++ rm /tmp/tmp.bboGgOuo8r /tmp/tmp.y9pvIYtGeE ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + desc 'secret without userAdmin' + set +o xtrace ----------------------------------------------------------------------------------- secret without userAdmin ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/e2e-tests/users/conf/secrets-2.yml ++ mktemp + local LAST_OUT=/tmp/tmp.4KRzpzqNrP ++ mktemp + local LAST_ERR=/tmp/tmp.orI8KqI5CV + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/e2e-tests/users/conf/secrets-2.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.4KRzpzqNrP secret/some-users configured + cat /tmp/tmp.orI8KqI5CV + rm /tmp/tmp.4KRzpzqNrP /tmp/tmp.orI8KqI5CV + return 0 + sleep 25 + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local 
LAST_OUT=/tmp/tmp.W7knUAWP42 +++ mktemp ++ local LAST_ERR=/tmp/tmp.E6JjfE8kaH ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.W7knUAWP42 ++ cat /tmp/tmp.E6JjfE8kaH ++ rm /tmp/tmp.W7knUAWP42 /tmp/tmp.E6JjfE8kaH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TzAVFvSG7j +++ mktemp ++ local LAST_ERR=/tmp/tmp.IGbspVqtWj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.TzAVFvSG7j ++ cat /tmp/tmp.IGbspVqtWj ++ rm /tmp/tmp.TzAVFvSG7j /tmp/tmp.IGbspVqtWj ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.avgIyxjK9g +++ mktemp ++ local LAST_ERR=/tmp/tmp.ecy8IKeN3f ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.avgIyxjK9g ++ cat /tmp/tmp.ecy8IKeN3f ++ rm /tmp/tmp.avgIyxjK9g /tmp/tmp.ecy8IKeN3f ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bKfAKnijHN +++ mktemp ++ local LAST_ERR=/tmp/tmp.oA84zXsDc4 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bKfAKnijHN ++ cat /tmp/tmp.oA84zXsDc4 ++ rm /tmp/tmp.bKfAKnijHN /tmp/tmp.oA84zXsDc4 ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5z3b20oIEW +++ mktemp ++ local LAST_ERR=/tmp/tmp.iMwOGXVhSZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.5z3b20oIEW ++ cat /tmp/tmp.iMwOGXVhSZ ++ rm /tmp/tmp.5z3b20oIEW /tmp/tmp.iMwOGXVhSZ ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dn9F3NVW2i +++ mktemp ++ local LAST_ERR=/tmp/tmp.o3bV8yn04E ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dn9F3NVW2i ++ cat /tmp/tmp.o3bV8yn04E ++ rm /tmp/tmp.dn9F3NVW2i /tmp/tmp.o3bV8yn04E ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . 
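
The loop above is wait_cluster_consistency: after the Secret change the psmdb resource briefly reports initializing and then error while the operator rotates credentials and restarts pods, and the test keeps polling .status.state until it reads ready, up to 32 attempts 10 seconds apart. A sketch of the loop reconstructed from the trace (the sleeps and limits are the values printed above; the timeout message on the failure path is an assumption, since this run never reaches it):

# Reconstruction of the readiness poll traced above.
wait_cluster_consistency() {
    local cluster_name=$1
    local wait_time=${2:-32}
    local retry=0
    sleep 7
    echo -n 'waiting for cluster readiness'
    until [[ "$(kubectl get psmdb "$cluster_name" -o 'jsonpath={.status.state}')" == "ready" ]]; do
        let retry+=1
        if [ "$retry" -ge "$wait_time" ]; then
            # Failure path is an assumption; the trace never hits the retry limit.
            echo "cluster $cluster_name did not reach ready state in time"
            return 1
        fi
        echo -n .
        sleep 10
    done
    echo .OK
}
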
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dRo19q8Og5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.z9gzqcU6RO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dRo19q8Og5 ++ cat /tmp/tmp.z9gzqcU6RO ++ rm /tmp/tmp.dRo19q8Og5 /tmp/tmp.z9gzqcU6RO ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LZFCgwOpG4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.sZviQvFzBc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LZFCgwOpG4 ++ cat /tmp/tmp.sZviQvFzBc ++ rm /tmp/tmp.LZFCgwOpG4 /tmp/tmp.sZviQvFzBc ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + sleep 15 ++ getUserData some-users MONGODB_USER_ADMIN_USER ++ local secretName=some-users ++ local dataKey=MONGODB_USER_ADMIN_USER +++ getSecretData some-users MONGODB_USER_ADMIN_USER +++ local secretName=some-users +++ local dataKey=MONGODB_USER_ADMIN_USER ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_USER_ADMIN_USER}}' ++++ base64 -d +++ local data=userAdmin +++ echo userAdmin ++ urlencode userAdmin ++ uri=userAdmin ++ echo -n userAdmin ++ jq -s -R -r @uri + user=userAdmin ++ getUserData some-users MONGODB_USER_ADMIN_PASSWORD ++ local secretName=some-users ++ local dataKey=MONGODB_USER_ADMIN_PASSWORD +++ getSecretData some-users MONGODB_USER_ADMIN_PASSWORD +++ local secretName=some-users +++ local dataKey=MONGODB_USER_ADMIN_PASSWORD ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_USER_ADMIN_PASSWORD}}' ++++ base64 -d +++ local data=03ihd05hhOPoyA2D +++ echo 03ihd05hhOPoyA2D ++ urlencode 03ihd05hhOPoyA2D ++ uri=03ihd05hhOPoyA2D ++ echo -n 03ihd05hhOPoyA2D ++ jq -s -R -r @uri + pass=03ihd05hhOPoyA2D + check_mongo_auth userAdmin:03ihd05hhOPoyA2D@some-name-rs0-0.some-name-rs0.users-26599 + local uri=userAdmin:03ihd05hhOPoyA2D@some-name-rs0-0.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin:03ihd05hhOPoyA2D@some-name-rs0-0.some-name-rs0.users-26599 mongodb '' --quiet ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin:03ihd05hhOPoyA2D@some-name-rs0-0.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.rPEUZXN3Rs ++++ mktemp +++ local LAST_ERR=/tmp/tmp.Lw4aKS19pD +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.rPEUZXN3Rs +++ cat /tmp/tmp.Lw4aKS19pD +++ rm /tmp/tmp.rPEUZXN3Rs /tmp/tmp.Lw4aKS19pD +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet 
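
The credentials used for the checks that follow are read straight back out of the some-users Secret and percent-encoded before going into the connection URI. Note that after applying the secret without the userAdmin keys, the stored password now differs from the one in the applied manifest (03ihd05hhOPoyA2D above), consistent with the operator regenerating missing credentials. A sketch of getSecretData, urlencode and getUserData as they expand in the trace:

# Reconstruction of the secret-reading helpers expanded above.
getSecretData() {
    local secretName=$1 dataKey=$2
    kubectl get "secrets/$secretName" "--template={{.data.$dataKey}}" | base64 -d
}

urlencode() {
    # Percent-encode the value so it can be embedded in a mongodb:// URI (jq's @uri filter).
    echo -n "$1" | jq -s -R -r @uri
}

getUserData() {
    urlencode "$(getSecretData "$1" "$2")"
}

# Usage matching this step:
user=$(getUserData some-users MONGODB_USER_ADMIN_USER)
pass=$(getUserData some-users MONGODB_USER_ADMIN_PASSWORD)
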
++ [[ userAdmin:03ihd05hhOPoyA2D@some-name-rs0-0.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:03ihd05hhOPoyA2D@some-name-rs0-0.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X5L2Du8DU6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.vjaEcByuNI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:03ihd05hhOPoyA2D@some-name-rs0-0.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.X5L2Du8DU6 ++ cat /tmp/tmp.vjaEcByuNI ++ rm /tmp/tmp.X5L2Du8DU6 /tmp/tmp.vjaEcByuNI ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth userAdmin:03ihd05hhOPoyA2D@some-name-rs0-1.some-name-rs0.users-26599 + local uri=userAdmin:03ihd05hhOPoyA2D@some-name-rs0-1.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin:03ihd05hhOPoyA2D@some-name-rs0-1.some-name-rs0.users-26599 mongodb '' --quiet ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin:03ihd05hhOPoyA2D@some-name-rs0-1.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.3lIveJ9y8K ++++ mktemp +++ local LAST_ERR=/tmp/tmp.WrBi3izdSN +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.3lIveJ9y8K +++ cat /tmp/tmp.WrBi3izdSN +++ rm /tmp/tmp.3lIveJ9y8K /tmp/tmp.WrBi3izdSN +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ userAdmin:03ihd05hhOPoyA2D@some-name-rs0-1.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:03ihd05hhOPoyA2D@some-name-rs0-1.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iXTuTMqJ1g +++ mktemp ++ local LAST_ERR=/tmp/tmp.st5y8SvM9f ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:03ihd05hhOPoyA2D@some-name-rs0-1.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.iXTuTMqJ1g ++ cat /tmp/tmp.st5y8SvM9f ++ rm /tmp/tmp.iXTuTMqJ1g 
/tmp/tmp.st5y8SvM9f ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth userAdmin:03ihd05hhOPoyA2D@some-name-rs0-2.some-name-rs0.users-26599 + local uri=userAdmin:03ihd05hhOPoyA2D@some-name-rs0-2.some-name-rs0.users-26599 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin:03ihd05hhOPoyA2D@some-name-rs0-2.some-name-rs0.users-26599 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin:03ihd05hhOPoyA2D@some-name-rs0-2.some-name-rs0.users-26599 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp egrep: warning: egrep is obsolescent; using grep -E +++ local LAST_OUT=/tmp/tmp.IMIdC6u0ds ++++ mktemp +++ local LAST_ERR=/tmp/tmp.bAutb9HJjk +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.IMIdC6u0ds +++ cat /tmp/tmp.bAutb9HJjk +++ rm /tmp/tmp.IMIdC6u0ds /tmp/tmp.bAutb9HJjk +++ return 0 ++ local client_container=psmdb-client-66f577db5f-rzdh7 ++ local mongo_flag=--quiet ++ [[ userAdmin:03ihd05hhOPoyA2D@some-name-rs0-2.some-name-rs0.users-26599 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:03ihd05hhOPoyA2D@some-name-rs0-2.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hvSUfblqwB +++ mktemp ++ local LAST_ERR=/tmp/tmp.qjDcyzyI6x ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-66f577db5f-rzdh7 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:03ihd05hhOPoyA2D@some-name-rs0-2.some-name-rs0.users-26599.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hvSUfblqwB ++ cat /tmp/tmp.qjDcyzyI6x ++ rm /tmp/tmp.hvSUfblqwB /tmp/tmp.qjDcyzyI6x ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + destroy users-26599 + local namespace=users-26599 + local ignore_logs=true + [[ 0 == 1 ]] + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false ']' + delete_backups + desc 'Delete psmdb-backup' + set +o xtrace ----------------------------------------------------------------------------------- Delete psmdb-backup ----------------------------------------------------------------------------------- ++ 
kubectl_bin get psmdb-backup --no-headers ++ wc -l +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ma9QwCWiTY +++ mktemp ++ local LAST_ERR=/tmp/tmp.7HO9Qg6Byl ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup --no-headers ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Ma9QwCWiTY ++ cat /tmp/tmp.7HO9Qg6Byl ++ rm /tmp/tmp.Ma9QwCWiTY /tmp/tmp.7HO9Qg6Byl ++ return 0 + '[' 1 '!=' 0 ']' + kubectl_bin get psmdb-backup ++ mktemp + local LAST_OUT=/tmp/tmp.7wXxC6l0CK ++ mktemp + local LAST_ERR=/tmp/tmp.TEi2eUPd6Y + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get psmdb-backup + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.7wXxC6l0CK NAME CLUSTER STORAGE DESTINATION TYPE SIZE STATUS COMPLETED AGE backup-minio some-name minio s3://operator-testing/2025-10-06T22:37:18Z logical 44.13KB ready 9m38s 9m44s + cat /tmp/tmp.TEi2eUPd6Y + rm /tmp/tmp.7wXxC6l0CK /tmp/tmp.TEi2eUPd6Y + return 0 + kubectl_bin delete psmdb-backup --all ++ mktemp + local LAST_OUT=/tmp/tmp.Kq3Xlr6zX8 ++ mktemp + local LAST_ERR=/tmp/tmp.bOYABUlGlb + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete psmdb-backup --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Kq3Xlr6zX8 perconaservermongodbbackup.psmdb.percona.com "backup-minio" deleted from users-26599 namespace + cat /tmp/tmp.bOYABUlGlb + rm /tmp/tmp.Kq3Xlr6zX8 /tmp/tmp.bOYABUlGlb + return 0 + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.u4SRLavqBs ++ mktemp + local LAST_ERR=/tmp/tmp.zXkAqiMxio + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.u4SRLavqBs customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.zXkAqiMxio + rm /tmp/tmp.u4SRLavqBs /tmp/tmp.zXkAqiMxio + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/deploy/crd.yaml ++ grep -v '\-\-\-' grep: warning: stray \ before - grep: warning: stray \ before - + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd 
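
Teardown begins by clearing leftover perconaservermongodbbackup objects, presumably so their finalizers cannot interfere with the CRD cleanup that follows; the listing above shows the backup-minio object from the earlier backup step still present. A sketch of delete_backups reconstructed from the trace:

# Reconstruction of the backup cleanup step traced above.
delete_backups() {
    local count
    count=$(kubectl get psmdb-backup --no-headers 2>/dev/null | wc -l)
    if [ "$count" != 0 ]; then
        # List what is about to be removed, then delete every backup object in the namespace.
        kubectl get psmdb-backup
        kubectl delete psmdb-backup --all
    fi
}
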
perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.8jtus1aXXC ++ mktemp + local LAST_ERR=/tmp/tmp.C9DtW71RQ7 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.8jtus1aXXC + cat /tmp/tmp.C9DtW71RQ7 + rm /tmp/tmp.8jtus1aXXC /tmp/tmp.C9DtW71RQ7 + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.LARN0QlDfr ++ mktemp + local LAST_ERR=/tmp/tmp.rYJwlFyq8p + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LARN0QlDfr + cat /tmp/tmp.rYJwlFyq8p + rm /tmp/tmp.LARN0QlDfr /tmp/tmp.rYJwlFyq8p + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.ViH09tpSPk ++ mktemp + local LAST_ERR=/tmp/tmp.4lyuvnQr8r + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ViH09tpSPk + cat /tmp/tmp.4lyuvnQr8r + rm /tmp/tmp.ViH09tpSPk /tmp/tmp.4lyuvnQr8r + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.p3hdicNmC5 ++ mktemp + local LAST_ERR=/tmp/tmp.YsACNSVdHc + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1993/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.p3hdicNmC5 clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.YsACNSVdHc + rm /tmp/tmp.p3hdicNmC5 /tmp/tmp.YsACNSVdHc + return 0 + destroy_cert_manager + kubectl_bin delete 
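
delete_crd, traced above, removes the operator CRDs without waiting, then for every CRD named in deploy/crd.yaml clears finalizers on any remaining custom resources before waiting for the CRD itself to disappear. The '\-\-\-' pattern passed to grep is what produces the "grep: warning: stray \ before -" messages; grep -v -e '---' expresses the same YAML-separator filter without the warnings. A condensed sketch of the helper reconstructed from the trace (src_dir as used throughout the trace; '|| :' mirrors the '+ :' error-tolerance lines above):

# Condensed reconstruction of delete_crd from the trace; not the exact e2e-tests source.
delete_crd() {
    kubectl delete -f "${src_dir}/deploy/crd.yaml" --ignore-not-found --wait=false

    for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v -e '---'); do
        # Clear finalizers on any leftover custom resources so deletion cannot hang,
        # tolerating errors once the resource type is already gone.
        kubectl get "$crd_name" --all-namespaces -o wide \
            | grep -v NAMESPACE \
            | xargs -L 1 sh -xc "kubectl patch $crd_name -n \$0 \$1 --type=merge -p '{\"metadata\":{\"finalizers\":[]}}'" \
            || :
        kubectl wait --for=delete crd "$crd_name"
    done
}
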
-f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.Ak6A4UQ66o ++ mktemp + local LAST_ERR=/tmp/tmp.wTU1gqH7e9 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.Ak6A4UQ66o + cat /tmp/tmp.wTU1gqH7e9 Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error 
when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": 
clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager" not found 
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.Ak6A4UQ66o + cat /tmp/tmp.wTU1gqH7e9 + sleep 4 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.Ak6A4UQ66o + cat /tmp/tmp.wTU1gqH7e9
found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": 
clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io 
"cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 8 + cat /tmp/tmp.Ak6A4UQ66o + cat /tmp/tmp.wTU1gqH7e9 Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + rm /tmp/tmp.Ak6A4UQ66o /tmp/tmp.wTU1gqH7e9 + return 1 + true + '[' -n '' ']' + '[' -n psmdb-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-26599 + rm -rf /tmp/tmp.NHPECSJQtW + kubectl_bin delete --grace-period=0 --force=true namespace psmdb-operator ++ mktemp ++ mktemp + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.yHQAMH3aAA + local LAST_OUT=/tmp/tmp.DGap10zB57 ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.2oYXoCXi2m + local exit_status=0 + local timeout=4 + local LAST_ERR=/tmp/tmp.ecjY1wxBuC + local exit_status=0 + local timeout=4 ++ seq 0 2 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete --grace-period=0 --force=true namespace psmdb-operator + for i in $(seq 0 2) + set +e + kubectl delete --grace-period=0 --force=true namespace users-26599