Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/e2e-tests/logs/users.log WARNING: version difference between client (1.32) and server (1.28) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.32) and server (1.28) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.32) and server (1.28) exceeds the supported minor version skew of +/-1 + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + create_infra users-18968 + local ns=users-18968 + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.d5KuryeYTt ++ mktemp + local LAST_ERR=/tmp/tmp.QAGsJ42lMk + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.d5KuryeYTt customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.QAGsJ42lMk + rm /tmp/tmp.d5KuryeYTt /tmp/tmp.QAGsJ42lMk + return 0 ++ grep -v '\-\-\-' ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/deploy/crd.yaml + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.MFRGBYqikW ++ mktemp + local LAST_ERR=/tmp/tmp.qfmm3ZAYkS + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.MFRGBYqikW + cat /tmp/tmp.qfmm3ZAYkS + rm /tmp/tmp.MFRGBYqikW /tmp/tmp.qfmm3ZAYkS + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + grep -v NAMESPACE + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' 
error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.3Gb3VutqE6 ++ mktemp + local LAST_ERR=/tmp/tmp.0oPgGCAb1M + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.3Gb3VutqE6 + cat /tmp/tmp.0oPgGCAb1M + rm /tmp/tmp.3Gb3VutqE6 /tmp/tmp.0oPgGCAb1M + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.UvregyL470 ++ mktemp + local LAST_ERR=/tmp/tmp.9PD4PNJoat + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.UvregyL470 + cat /tmp/tmp.9PD4PNJoat + rm /tmp/tmp.UvregyL470 /tmp/tmp.9PD4PNJoat + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.xNK6q5Xqfm ++ mktemp + local LAST_ERR=/tmp/tmp.hPkWAMBiDs + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.xNK6q5Xqfm clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.hPkWAMBiDs + rm /tmp/tmp.xNK6q5Xqfm /tmp/tmp.hPkWAMBiDs + return 0 + check_crd_for_deletion PR-1735-753d0dfe + local git_tag=PR-1735-753d0dfe ++ curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/PR-1735-753d0dfe/deploy/crd.yaml ++ /usr/bin/sed s/---//g ++ yq eval .metadata.name ++ /usr/bin/sed ':a;N;$!ba;s/\n/ /g' + for crd_name in '$(curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml | yq eval '\''.metadata.name'\'' | $sed '\''s/---//g'\'' | $sed '\'':a;N;$!ba;s/\n/ /g'\'')' ++ kubectl_bin get crd/null -o 'jsonpath={.status.conditions[-1].type}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bprpfd32ez +++ mktemp ++ local LAST_ERR=/tmp/tmp.8PjIx2kJzF ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.bprpfd32ez ++ cat /tmp/tmp.8PjIx2kJzF Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not 
found ++ sleep 0 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.bprpfd32ez ++ cat /tmp/tmp.8PjIx2kJzF Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 4 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.bprpfd32ez ++ cat /tmp/tmp.8PjIx2kJzF Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 8 ++ cat /tmp/tmp.bprpfd32ez ++ cat /tmp/tmp.8PjIx2kJzF Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ rm /tmp/tmp.bprpfd32ez /tmp/tmp.8PjIx2kJzF ++ return 1 + [[ '' == \T\e\r\m\i\n\a\t\i\n\g ]] + '[' -n psmdb-operator ']' + create_namespace psmdb-operator + local namespace=psmdb-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ sed s/NAMESPACE// ++ awk '-F ' '{print $2}' + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl api-resources ++ grep chaos-mesh ++ grep chaos-mesh.org ++ awk '{print $1}' ++ kubectl get crd + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl get clusterrole ++ grep chaos-mesh + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + '[' -n '' ']' + desc 'cleaned up old namespaces psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace psmdb-operator --ignore-not-found + xargs kubectl delete ns + awk '{print$1}' + kubectl_bin get ns ++ mktemp ++ mktemp + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' + local LAST_OUT=/tmp/tmp.kQ9c57AoDC + local LAST_OUT=/tmp/tmp.jnj3XYEm74 ++ mktemp + local LAST_ERR=/tmp/tmp.Gm3BhCrzdS + local 
exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns ++ mktemp + local LAST_ERR=/tmp/tmp.7Zp017ljgN + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace psmdb-operator --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.kQ9c57AoDC + cat /tmp/tmp.Gm3BhCrzdS + rm /tmp/tmp.kQ9c57AoDC /tmp/tmp.Gm3BhCrzdS + return 0 namespace "gke-managed-system" deleted namespace "gmp-public" deleted namespace "gmp-system" deleted namespace "users-7350" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.jnj3XYEm74 namespace "psmdb-operator" deleted + cat /tmp/tmp.7Zp017ljgN + rm /tmp/tmp.jnj3XYEm74 /tmp/tmp.7Zp017ljgN + return 0 + kubectl_bin wait --for=delete namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.oKFC57ljM6 ++ mktemp + local LAST_ERR=/tmp/tmp.dxZ8h5VNFW + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.oKFC57ljM6 + cat /tmp/tmp.dxZ8h5VNFW + rm /tmp/tmp.oKFC57ljM6 /tmp/tmp.dxZ8h5VNFW + return 0 + desc 'create namespace psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.oGwLy4gZlT ++ mktemp + local LAST_ERR=/tmp/tmp.oaIq91DnBg + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.oGwLy4gZlT namespace/psmdb-operator created + cat /tmp/tmp.oaIq91DnBg + rm /tmp/tmp.oGwLy4gZlT /tmp/tmp.oaIq91DnBg + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.8r4DENolpI +++ mktemp ++ local LAST_ERR=/tmp/tmp.n4bbMV95TW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8r4DENolpI ++ cat /tmp/tmp.n4bbMV95TW ++ rm /tmp/tmp.8r4DENolpI /tmp/tmp.n4bbMV95TW ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1735-753d0dfe-52-cluster1 --namespace=psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.D3EUXOLXke ++ mktemp + local LAST_ERR=/tmp/tmp.U6ahFuHWR8 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1735-753d0dfe-52-cluster1 --namespace=psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.D3EUXOLXke Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1735-753d0dfe-52-cluster1" modified. 
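Note: every kubectl_bin call above expands into the same block of trace noise (two mktemp files, a seq 0 2 retry loop, cat/rm of the temp files, return). A minimal sketch of that retry wrapper, reconstructed from this trace rather than taken from the test-suite helpers, so details may differ:

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    exit_status=0
    local timeout=4
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" 1>"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ "$exit_status" != 0 ]; then
            # failed attempt: dump what we got, back off (observed: sleep 0, 4, 8), retry
            cat "$LAST_OUT"
            cat "$LAST_ERR" >&2
            sleep $((timeout * i))
            continue
        fi
        break
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR" >&2
    rm -f "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}

Read with this in mind, a single '+ return 0' after the cat/rm pair means the wrapped kubectl command succeeded on the first try; the crd/null lookup earlier in the trace exhausts all three attempts and returns 1.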
+ cat /tmp/tmp.U6ahFuHWR8 + rm /tmp/tmp.D3EUXOLXke /tmp/tmp.U6ahFuHWR8 + return 0 + deploy_operator + desc 'start PSMDB operator' + set +o xtrace ----------------------------------------------------------------------------------- start PSMDB operator ----------------------------------------------------------------------------------- + local cr_file ++ mktemp + local temp_operator_yaml=/tmp/tmp.wTRDDjvdY8 + '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/e2e-tests/users/conf/crd.yaml ']' + cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/deploy/crd.yaml + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.BozbuvRbsr ++ mktemp + local LAST_ERR=/tmp/tmp.T3u9xQHhUo + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.BozbuvRbsr customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied + cat /tmp/tmp.T3u9xQHhUo + rm /tmp/tmp.BozbuvRbsr /tmp/tmp.T3u9xQHhUo + return 0 + [[ -n psmdb-operator ]] + apply_rbac cw-rbac + local operator_namespace=psmdb-operator + local rbac=cw-rbac + sed -e 's^namespace: .*^namespace: psmdb-operator^' + kubectl_bin apply -n psmdb-operator -f - + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/deploy/cw-rbac.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.vps7s5bPIh ++ mktemp + local LAST_ERR=/tmp/tmp.4sG0l6v9ZZ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.vps7s5bPIh clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created + cat /tmp/tmp.4sG0l6v9ZZ + rm /tmp/tmp.vps7s5bPIh /tmp/tmp.4sG0l6v9ZZ + return 0 + yq eval ' (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-1735-753d0dfe") | ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") | ((.. 
| select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/deploy/cw-operator.yaml + [[ amd64 == \a\r\m\6\4 ]] + kubectl_bin apply -f /tmp/tmp.wTRDDjvdY8 ++ mktemp + local LAST_OUT=/tmp/tmp.T0co5JNhsW ++ mktemp + local LAST_ERR=/tmp/tmp.6nW00g6h9I + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /tmp/tmp.wTRDDjvdY8 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.T0co5JNhsW deployment.apps/percona-server-mongodb-operator created + cat /tmp/tmp.6nW00g6h9I + rm /tmp/tmp.T0co5JNhsW /tmp/tmp.6nW00g6h9I + return 0 + sleep 2 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.vPXOEXSH6o +++ mktemp ++ local LAST_ERR=/tmp/tmp.DNG1gCt4dS ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.vPXOEXSH6o ++ cat /tmp/tmp.DNG1gCt4dS ++ rm /tmp/tmp.vPXOEXSH6o /tmp/tmp.DNG1gCt4dS ++ return 0 + wait_pod percona-server-mongodb-operator-8c87d9689-tgct9 + local pod=percona-server-mongodb-operator-8c87d9689-tgct9 + set +o xtrace waiting for pod/percona-server-mongodb-operator-8c87d9689-tgct9 to be ready.OK + create_namespace users-18968 + local namespace=users-18968 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get ValidatingWebhookConfiguration + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep validate-auth ++ awk '{print $1}' ++ kubectl get ValidatingWebhookConfiguration + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl api-resources ++ grep chaos-mesh.org ++ awk '{print $1}' ++ kubectl get crd + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl get clusterrole + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + '[' -n '' ']' + desc 'cleaned up old namespaces users-18968' + set +o xtrace 
----------------------------------------------------------------------------------- cleaned up old namespaces users-18968 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-18968 --ignore-not-found + xargs kubectl delete ns + awk '{print$1}' + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' + kubectl_bin get ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.6KB3WtUCam + local LAST_OUT=/tmp/tmp.9BMxj43S1O ++ mktemp + local LAST_ERR=/tmp/tmp.49ZaLgIVhZ + local exit_status=0 + local timeout=4 ++ mktemp ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + local LAST_ERR=/tmp/tmp.pkh9TPgmU4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-18968 --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6KB3WtUCam + cat /tmp/tmp.49ZaLgIVhZ + rm /tmp/tmp.6KB3WtUCam /tmp/tmp.49ZaLgIVhZ + return 0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9BMxj43S1O + cat /tmp/tmp.pkh9TPgmU4 + rm /tmp/tmp.9BMxj43S1O /tmp/tmp.pkh9TPgmU4 + return 0 + kubectl_bin wait --for=delete namespace users-18968 ++ mktemp + local LAST_OUT=/tmp/tmp.HOgI0yKhTX ++ mktemp + local LAST_ERR=/tmp/tmp.Qnl2Pn1LMN + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete namespace users-18968 namespace "gke-managed-system" deleted namespace "gmp-public" deleted namespace "gmp-system" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.HOgI0yKhTX + cat /tmp/tmp.Qnl2Pn1LMN + rm /tmp/tmp.HOgI0yKhTX /tmp/tmp.Qnl2Pn1LMN + return 0 + desc 'create namespace users-18968' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-18968 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-18968 ++ mktemp + local LAST_OUT=/tmp/tmp.PZdrIIpE3O ++ mktemp + local LAST_ERR=/tmp/tmp.KJOEQ8SzGP + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-18968 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.PZdrIIpE3O namespace/users-18968 created + cat /tmp/tmp.KJOEQ8SzGP + rm /tmp/tmp.PZdrIIpE3O /tmp/tmp.KJOEQ8SzGP + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.v8bjcH38au +++ mktemp ++ local LAST_ERR=/tmp/tmp.VlZDbwkV9z ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.v8bjcH38au ++ cat /tmp/tmp.VlZDbwkV9z ++ rm /tmp/tmp.v8bjcH38au /tmp/tmp.VlZDbwkV9z ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1735-753d0dfe-52-cluster1 --namespace=users-18968 ++ mktemp + local LAST_OUT=/tmp/tmp.RsmOYQFjKA ++ mktemp + local LAST_ERR=/tmp/tmp.oYs9usTM8R + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1735-753d0dfe-52-cluster1 --namespace=users-18968 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.RsmOYQFjKA Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1735-753d0dfe-52-cluster1" modified. 
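Note: the repeated 'error: resource(s) were provided, but no name was specified' lines in the chaos-mesh cleanup above are expected on a clean cluster. Each delete builds its name list from a grep that matches nothing, kubectl then complains, and the script swallows the failure with ':' (shown in the trace as '+ :'). A minimal sketch of that tolerant-cleanup pattern, using the same filters seen in the trace:

# delete leftover chaos-mesh webhook configurations, if any; ignore the
# "no name was specified" error when the grep matches nothing
timeout 30 kubectl delete MutatingWebhookConfiguration \
    $(kubectl get MutatingWebhookConfiguration | grep chaos-mesh | awk '{print $1}') || :

# same style for stale test namespaces left over from previous runs
kubectl get ns \
    | egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' \
    | awk '{print $1}' | xargs kubectl delete ns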
+ cat /tmp/tmp.oYs9usTM8R + rm /tmp/tmp.RsmOYQFjKA /tmp/tmp.oYs9usTM8R + return 0 + deploy_minio + desc 'install Minio' + set +o xtrace ----------------------------------------------------------------------------------- install Minio ----------------------------------------------------------------------------------- + helm uninstall minio-service Error: uninstall: Release not loaded: minio-service: release: not found + : + helm repo remove minio "minio" has been removed from your repositories + helm repo add minio https://charts.min.io/ "minio" has been added to your repositories + retry 10 60 helm install minio-service --version 5.0.14 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false --set 'tolerations[0].key=kubernetes.io/arch' --set 'tolerations[0].operator=Equal' --set 'tolerations[0].value=arm64' --set 'tolerations[0].effect=NoSchedule' --set 'postJob.tolerations[0].key=kubernetes.io/arch' --set 'postJob.tolerations[0].operator=Equal' --set 'postJob.tolerations[0].value=arm64' --set 'postJob.tolerations[0].effect=NoSchedule' minio/minio + local max=10 + local delay=60 + shift 2 + local n=1 + helm install minio-service --version 5.0.14 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false --set 'tolerations[0].key=kubernetes.io/arch' --set 'tolerations[0].operator=Equal' --set 'tolerations[0].value=arm64' --set 'tolerations[0].effect=NoSchedule' --set 'postJob.tolerations[0].key=kubernetes.io/arch' --set 'postJob.tolerations[0].operator=Equal' --set 'postJob.tolerations[0].value=arm64' --set 'postJob.tolerations[0].effect=NoSchedule' minio/minio NAME: minio-service LAST DEPLOYED: Mon Jan 13 14:32:16 2025 NAMESPACE: users-18968 STATUS: deployed REVISION: 1 TEST SUITE: None NOTES: MinIO can be accessed via port 9000 on the following DNS name from within your cluster: minio-service.users-18968.svc.cluster.local To access MinIO from localhost, run the below commands: 1. export POD_NAME=$(kubectl get pods --namespace users-18968 -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}") 2. kubectl port-forward $POD_NAME 9000 --namespace users-18968 Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/ You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client: 1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart 2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace users-18968 minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace users-18968 minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000 3. 
mc ls minio-service-local ++ kubectl_bin get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MXF6ko9lDh +++ mktemp ++ local LAST_ERR=/tmp/tmp.fGjB9Vphbf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.MXF6ko9lDh ++ cat /tmp/tmp.fGjB9Vphbf ++ rm /tmp/tmp.MXF6ko9lDh /tmp/tmp.fGjB9Vphbf ++ return 0 + MINIO_POD=minio-service-754d5879cb-l9bg7 + wait_pod minio-service-754d5879cb-l9bg7 + local pod=minio-service-754d5879cb-l9bg7 + set +o xtrace waiting for pod/minio-service-754d5879cb-l9bg7 to be ready.OK + '[' -n psmdb-operator ']' + kubectl_bin create svc -n psmdb-operator externalname minio-service --external-name=minio-service.users-18968.svc.cluster.local --tcp=9000 ++ mktemp + local LAST_OUT=/tmp/tmp.LrOcX7eYuh ++ mktemp + local LAST_ERR=/tmp/tmp.oxkMm0w5bm + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create svc -n psmdb-operator externalname minio-service --external-name=minio-service.users-18968.svc.cluster.local --tcp=9000 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LrOcX7eYuh service/minio-service created + cat /tmp/tmp.oxkMm0w5bm + rm /tmp/tmp.LrOcX7eYuh /tmp/tmp.oxkMm0w5bm + return 0 + aws_cli 's3 mb s3://operator-testing' + local 'cmd=s3 mb s3://operator-testing' + kubectl_bin run -i --rm aws-cli --image=amazon/aws-cli --restart=Never --env=AWS_ACCESS_KEY_ID=some-access-key --env=AWS_SECRET_ACCESS_KEY=some-secret-key --env=AWS_DEFAULT_REGION=us-east-1 '--overrides={"apiVersion": "v1","spec": {"tolerations": [{"key": "kubernetes.io/arch","operator": "Equal","value": "arm64","effect": "NoSchedule"}]}}' -- --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing ++ mktemp + local LAST_OUT=/tmp/tmp.kOEXa2HJOd ++ mktemp + local LAST_ERR=/tmp/tmp.Oaof1SjE2L + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl run -i --rm aws-cli --image=amazon/aws-cli --restart=Never --env=AWS_ACCESS_KEY_ID=some-access-key --env=AWS_SECRET_ACCESS_KEY=some-secret-key --env=AWS_DEFAULT_REGION=us-east-1 '--overrides={"apiVersion": "v1","spec": {"tolerations": [{"key": "kubernetes.io/arch","operator": "Equal","value": "arm64","effect": "NoSchedule"}]}}' -- --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.kOEXa2HJOd make_bucket: operator-testing pod "aws-cli" deleted + cat /tmp/tmp.Oaof1SjE2L If you don't see a command prompt, try pressing enter. 
warning: couldn't attach to pod/aws-cli, falling back to streaming logs: Internal error occurred: error attaching to container: container is in CONTAINER_EXITED state + rm /tmp/tmp.kOEXa2HJOd /tmp/tmp.Oaof1SjE2L + return 0 + desc 'create secrets and start client' + set +o xtrace ----------------------------------------------------------------------------------- create secrets and start client ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.1Znv2RDDk5 ++ mktemp + local LAST_ERR=/tmp/tmp.h3M5zNuUm7 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.1Znv2RDDk5 secret/some-users created + cat /tmp/tmp.h3M5zNuUm7 + rm /tmp/tmp.1Znv2RDDk5 /tmp/tmp.h3M5zNuUm7 + return 0 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/e2e-tests/conf/minio-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.zLimDO4Xst ++ mktemp + local LAST_ERR=/tmp/tmp.78lmPljVbG + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/e2e-tests/conf/minio-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.zLimDO4Xst secret/minio-secret created + cat /tmp/tmp.78lmPljVbG + rm /tmp/tmp.zLimDO4Xst /tmp/tmp.78lmPljVbG + return 0 + apply_client /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/e2e-tests/conf/client.yml + [[ amd64 == \a\r\m\6\4 ]] + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/e2e-tests/conf/client.yml ++ mktemp + local LAST_OUT=/tmp/tmp.xstU5RQc1p ++ mktemp + local LAST_ERR=/tmp/tmp.bsv9vy8cfP + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/e2e-tests/conf/client.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.xstU5RQc1p deployment.apps/psmdb-client created + cat /tmp/tmp.bsv9vy8cfP + rm /tmp/tmp.xstU5RQc1p /tmp/tmp.bsv9vy8cfP + return 0 + cluster=some-name-rs0 + desc 'create first PSMDB cluster some-name-rs0' + set +o xtrace ----------------------------------------------------------------------------------- create first PSMDB cluster some-name-rs0 ----------------------------------------------------------------------------------- + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/e2e-tests/users/conf/some-name-rs0.yml + [[ -n '' ]] + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/e2e-tests/users/conf/some-name-rs0.yml + kubectl_bin apply -f - ++ mktemp ++ mktemp + local temp_cr=/tmp/tmp.BZWnvihSzM + yq eval ' (.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod7.0" | (.spec | select(has("pmm"))).pmm.image = "perconalab/pmm-client:dev-latest" | (.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-1735-753d0dfe" | (.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup" | .spec.upgradeOptions.apply="Never"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/e2e-tests/users/conf/some-name-rs0.yml + local LAST_OUT=/tmp/tmp.JirQNyMyx6 ++ mktemp + local 
LAST_ERR=/tmp/tmp.El0lsqn48Z + local exit_status=0 + local timeout=4 + [[ amd64 == \a\r\m\6\4 ]] + cat /tmp/tmp.BZWnvihSzM ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.JirQNyMyx6 perconaservermongodb.psmdb.percona.com/some-name created + cat /tmp/tmp.El0lsqn48Z + rm /tmp/tmp.JirQNyMyx6 /tmp/tmp.El0lsqn48Z + return 0 + desc 'Check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- Check if all 3 Pods started ----------------------------------------------------------------------------------- + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.......OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.....OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XZ8KfghLah +++ mktemp ++ local LAST_ERR=/tmp/tmp.EteE0foZPG ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.XZ8KfghLah ++ cat /tmp/tmp.EteE0foZPG ++ rm /tmp/tmp.XZ8KfghLah /tmp/tmp.EteE0foZPG ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.....OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dbnwUhGJqp +++ mktemp ++ local LAST_ERR=/tmp/tmp.VqfbO6KB5a ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dbnwUhGJqp ++ cat /tmp/tmp.VqfbO6KB5a ++ rm /tmp/tmp.dbnwUhGJqp /tmp/tmp.VqfbO6KB5a ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness... 
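Note: the steps that follow all rotate a system user's credentials the same way: patch one key of the some-users Secret with a base64-encoded value, wait for the psmdb resource to report ready again, then authenticate against every rs0 member with the new credentials. A condensed sketch of that flow, assuming the names used in this run (some-users, some-name, namespace users-18968, the psmdb-client pod); the real helpers (patch_secret, wait_cluster_consistency, getUserData, check_mongo_auth) add retry loops and filter the mongo output:

new_pass='test-password'
new_pass_b64=$(echo -n "$new_pass" | base64)   # dGVzdC1wYXNzd29yZA==

# 1. rotate the password by patching the users Secret
kubectl patch secret some-users \
    -p="{\"data\":{\"MONGODB_DATABASE_ADMIN_PASSWORD\": \"$new_pass_b64\"}}"

# 2. give the operator time to reconcile, then confirm the cluster is ready again
sleep 25
[ "$(kubectl get psmdb some-name -o jsonpath='{.status.state}')" = "ready" ]

# 3. read the user name back from the Secret, URL-encode it, and ping each member
user=$(kubectl get secrets/some-users --template='{{.data.MONGODB_DATABASE_ADMIN_USER}}' | base64 -d)
user=$(echo -n "$user" | jq -s -R -r @uri)
client=$(kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}')
for i in 0 1 2; do
    kubectl exec "$client" -- bash -c \
        "printf 'db.runCommand({ ping: 1 }).ok\n' | mongo mongodb://$user:$new_pass@some-name-rs0-$i.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet"
done

Each ping should print 1, which is exactly what the trace below verifies with '[' 1 '!=' 1 ']' after every member.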
+ backup_name=backup-minio + desc 'change MONGODB_DATABASE_ADMIN_PASSWORD' + set +o xtrace ----------------------------------------------------------------------------------- change MONGODB_DATABASE_ADMIN_PASSWORD ----------------------------------------------------------------------------------- + patch_secret some-users MONGODB_DATABASE_ADMIN_PASSWORD dGVzdC1wYXNzd29yZA== + local secret=some-users + local key=MONGODB_DATABASE_ADMIN_PASSWORD + local value=dGVzdC1wYXNzd29yZA== + kubectl patch secret some-users '-p={"data":{"MONGODB_DATABASE_ADMIN_PASSWORD": "dGVzdC1wYXNzd29yZA=="}}' secret/some-users patched + sleep 25 + psmdb=some-name + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UcShpMwscs +++ mktemp ++ local LAST_ERR=/tmp/tmp.TmWEtdZsLM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.UcShpMwscs ++ cat /tmp/tmp.TmWEtdZsLM ++ rm /tmp/tmp.UcShpMwscs /tmp/tmp.TmWEtdZsLM ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + sleep 15 ++ getUserData some-users MONGODB_DATABASE_ADMIN_USER ++ local secretName=some-users ++ local dataKey=MONGODB_DATABASE_ADMIN_USER +++ getSecretData some-users MONGODB_DATABASE_ADMIN_USER +++ local secretName=some-users +++ local dataKey=MONGODB_DATABASE_ADMIN_USER ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_DATABASE_ADMIN_USER}}' ++++ base64 -d +++ local data=databaseAdmin +++ echo databaseAdmin ++ urlencode databaseAdmin ++ uri=databaseAdmin ++ echo -n databaseAdmin ++ jq -s -R -r @uri + user=databaseAdmin + check_mongo_auth databaseAdmin:test-password@some-name-rs0-0.some-name-rs0.users-18968 + local uri=databaseAdmin:test-password@some-name-rs0-0.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' databaseAdmin:test-password@some-name-rs0-0.some-name-rs0.users-18968 mongodb '' --quiet ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=databaseAdmin:test-password@some-name-rs0-0.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.xZPWmpajin ++++ mktemp +++ local LAST_ERR=/tmp/tmp.KJPZycaH5t +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.xZPWmpajin +++ cat /tmp/tmp.KJPZycaH5t +++ rm /tmp/tmp.xZPWmpajin /tmp/tmp.KJPZycaH5t +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ databaseAdmin:test-password@some-name-rs0-0.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo 
mongodb://databaseAdmin:test-password@some-name-rs0-0.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qnoyApSUkx +++ mktemp ++ local LAST_ERR=/tmp/tmp.QUTrMhXMCj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://databaseAdmin:test-password@some-name-rs0-0.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.qnoyApSUkx ++ cat /tmp/tmp.QUTrMhXMCj ++ rm /tmp/tmp.qnoyApSUkx /tmp/tmp.QUTrMhXMCj ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth databaseAdmin:test-password@some-name-rs0-1.some-name-rs0.users-18968 + local uri=databaseAdmin:test-password@some-name-rs0-1.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' databaseAdmin:test-password@some-name-rs0-1.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=databaseAdmin:test-password@some-name-rs0-1.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.MCYYc8wivN ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++++ mktemp +++ local LAST_ERR=/tmp/tmp.S9E54GP1BS +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.MCYYc8wivN +++ cat /tmp/tmp.S9E54GP1BS +++ rm /tmp/tmp.MCYYc8wivN /tmp/tmp.S9E54GP1BS +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ databaseAdmin:test-password@some-name-rs0-1.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://databaseAdmin:test-password@some-name-rs0-1.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TVrgZIVv4o +++ mktemp ++ local LAST_ERR=/tmp/tmp.GqiZmkOm5C ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://databaseAdmin:test-password@some-name-rs0-1.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.TVrgZIVv4o ++ cat /tmp/tmp.GqiZmkOm5C ++ rm /tmp/tmp.TVrgZIVv4o /tmp/tmp.GqiZmkOm5C ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 
'!=' 1 ']' + check_mongo_auth databaseAdmin:test-password@some-name-rs0-2.some-name-rs0.users-18968 + local uri=databaseAdmin:test-password@some-name-rs0-2.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' databaseAdmin:test-password@some-name-rs0-2.some-name-rs0.users-18968 mongodb '' --quiet ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=databaseAdmin:test-password@some-name-rs0-2.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.pRu6eaDyM4 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.Mprv3bTp6O +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.pRu6eaDyM4 +++ cat /tmp/tmp.Mprv3bTp6O +++ rm /tmp/tmp.pRu6eaDyM4 /tmp/tmp.Mprv3bTp6O +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ databaseAdmin:test-password@some-name-rs0-2.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://databaseAdmin:test-password@some-name-rs0-2.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.chD0WsaW15 +++ mktemp ++ local LAST_ERR=/tmp/tmp.T4pnBxZpK3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://databaseAdmin:test-password@some-name-rs0-2.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.chD0WsaW15 ++ cat /tmp/tmp.T4pnBxZpK3 ++ rm /tmp/tmp.chD0WsaW15 /tmp/tmp.T4pnBxZpK3 ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + desc 'change MONGODB_BACKUP_PASSWORD' + set +o xtrace ----------------------------------------------------------------------------------- change MONGODB_BACKUP_PASSWORD ----------------------------------------------------------------------------------- + patch_secret some-users MONGODB_BACKUP_PASSWORD dGVzdC1wYXNzd29yZA== + local secret=some-users + local key=MONGODB_BACKUP_PASSWORD + local value=dGVzdC1wYXNzd29yZA== + kubectl patch secret some-users '-p={"data":{"MONGODB_BACKUP_PASSWORD": "dGVzdC1wYXNzd29yZA=="}}' secret/some-users patched + sleep 25 + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4lLZMn77Cn +++ mktemp ++ local LAST_ERR=/tmp/tmp.SsNMOp0g2C ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 
2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.4lLZMn77Cn ++ cat /tmp/tmp.SsNMOp0g2C ++ rm /tmp/tmp.4lLZMn77Cn /tmp/tmp.SsNMOp0g2C ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + sleep 15 ++ getUserData some-users MONGODB_BACKUP_USER ++ local secretName=some-users ++ local dataKey=MONGODB_BACKUP_USER +++ getSecretData some-users MONGODB_BACKUP_USER +++ local secretName=some-users +++ local dataKey=MONGODB_BACKUP_USER ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_BACKUP_USER}}' ++++ base64 -d +++ local 'data=backup$#%' +++ echo 'backup$#%' ++ urlencode 'backup$#%' ++ uri='backup$#%' ++ echo -n 'backup$#%' ++ jq -s -R -r @uri + user=backup%24%23%25 + check_mongo_auth backup%24%23%25:test-password@some-name-rs0-0.some-name-rs0.users-18968 + local uri=backup%24%23%25:test-password@some-name-rs0-0.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' backup%24%23%25:test-password@some-name-rs0-0.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local uri=backup%24%23%25:test-password@some-name-rs0-0.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.niGH8flsnr ++++ mktemp +++ local LAST_ERR=/tmp/tmp.SCiBjVwrGv +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.niGH8flsnr +++ cat /tmp/tmp.SCiBjVwrGv +++ rm /tmp/tmp.niGH8flsnr /tmp/tmp.SCiBjVwrGv +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ backup%24%23%25:test-password@some-name-rs0-0.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://backup%24%23%25:test-password@some-name-rs0-0.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xv1HfaUE8k +++ mktemp ++ local LAST_ERR=/tmp/tmp.WyryT0ul2G ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://backup%24%23%25:test-password@some-name-rs0-0.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xv1HfaUE8k ++ cat /tmp/tmp.WyryT0ul2G ++ rm /tmp/tmp.xv1HfaUE8k /tmp/tmp.WyryT0ul2G ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth backup%24%23%25:test-password@some-name-rs0-1.some-name-rs0.users-18968 + local uri=backup%24%23%25:test-password@some-name-rs0-1.some-name-rs0.users-18968 ++ run_mongo 
'db.runCommand({ ping: 1 }).ok' backup%24%23%25:test-password@some-name-rs0-1.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=backup%24%23%25:test-password@some-name-rs0-1.some-name-rs0.users-18968 ++ local driver=mongodb ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.rJvlq0rxv2 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.zKRZtJ4Xbi +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.rJvlq0rxv2 +++ cat /tmp/tmp.zKRZtJ4Xbi +++ rm /tmp/tmp.rJvlq0rxv2 /tmp/tmp.zKRZtJ4Xbi +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ backup%24%23%25:test-password@some-name-rs0-1.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://backup%24%23%25:test-password@some-name-rs0-1.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.z9xfQhDAly +++ mktemp ++ local LAST_ERR=/tmp/tmp.gkcLwT6o14 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://backup%24%23%25:test-password@some-name-rs0-1.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.z9xfQhDAly ++ cat /tmp/tmp.gkcLwT6o14 ++ rm /tmp/tmp.z9xfQhDAly /tmp/tmp.gkcLwT6o14 ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth backup%24%23%25:test-password@some-name-rs0-2.some-name-rs0.users-18968 + local uri=backup%24%23%25:test-password@some-name-rs0-2.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' backup%24%23%25:test-password@some-name-rs0-2.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=backup%24%23%25:test-password@some-name-rs0-2.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.JPxeEOFco3 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.69uxR9JwKr +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat 
/tmp/tmp.JPxeEOFco3 +++ cat /tmp/tmp.69uxR9JwKr +++ rm /tmp/tmp.JPxeEOFco3 /tmp/tmp.69uxR9JwKr +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ backup%24%23%25:test-password@some-name-rs0-2.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://backup%24%23%25:test-password@some-name-rs0-2.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rzAYAItB3t +++ mktemp ++ local LAST_ERR=/tmp/tmp.mTnkUJAOAe ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://backup%24%23%25:test-password@some-name-rs0-2.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.rzAYAItB3t ++ cat /tmp/tmp.mTnkUJAOAe ++ rm /tmp/tmp.rzAYAItB3t /tmp/tmp.mTnkUJAOAe ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + desc 'change MONGODB_BACKUP_USER' + set +o xtrace ----------------------------------------------------------------------------------- change MONGODB_BACKUP_USER ----------------------------------------------------------------------------------- ++ echo -n backup2 ++ base64 + newnameencrypted=YmFja3VwMg== + patch_secret some-users MONGODB_BACKUP_USER YmFja3VwMg== + local secret=some-users + local key=MONGODB_BACKUP_USER + local value=YmFja3VwMg== + kubectl patch secret some-users '-p={"data":{"MONGODB_BACKUP_USER": "YmFja3VwMg=="}}' secret/some-users patched + sleep 25 + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BTv7NDml1s +++ mktemp ++ local LAST_ERR=/tmp/tmp.UjXcJt2GBJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.BTv7NDml1s ++ cat /tmp/tmp.UjXcJt2GBJ ++ rm /tmp/tmp.BTv7NDml1s /tmp/tmp.UjXcJt2GBJ ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + sleep 15 ++ getUserData some-users MONGODB_BACKUP_USER ++ local secretName=some-users ++ local dataKey=MONGODB_BACKUP_USER +++ getSecretData some-users MONGODB_BACKUP_USER +++ local secretName=some-users +++ local dataKey=MONGODB_BACKUP_USER ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_BACKUP_USER}}' ++++ base64 -d +++ local data=backup2 +++ echo backup2 ++ urlencode backup2 ++ uri=backup2 ++ jq -s -R -r @uri ++ echo -n backup2 + user=backup2 + check_mongo_auth backup2:test-password@some-name-rs0-0.some-name-rs0.users-18968 + local uri=backup2:test-password@some-name-rs0-0.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' backup2:test-password@some-name-rs0-0.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local 
uri=backup2:test-password@some-name-rs0-0.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.dMyUgUWdfl ++++ mktemp +++ local LAST_ERR=/tmp/tmp.cdM4Lygj21 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.dMyUgUWdfl +++ cat /tmp/tmp.cdM4Lygj21 +++ rm /tmp/tmp.dMyUgUWdfl /tmp/tmp.cdM4Lygj21 +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ backup2:test-password@some-name-rs0-0.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://backup2:test-password@some-name-rs0-0.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hMCQJ1NMUO +++ mktemp ++ local LAST_ERR=/tmp/tmp.02oZYDojts ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://backup2:test-password@some-name-rs0-0.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hMCQJ1NMUO ++ cat /tmp/tmp.02oZYDojts ++ rm /tmp/tmp.hMCQJ1NMUO /tmp/tmp.02oZYDojts ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth backup2:test-password@some-name-rs0-1.some-name-rs0.users-18968 + local uri=backup2:test-password@some-name-rs0-1.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' backup2:test-password@some-name-rs0-1.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=backup2:test-password@some-name-rs0-1.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.Ru7UnwSlo1 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.sXhcFjujGN +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.Ru7UnwSlo1 +++ cat /tmp/tmp.sXhcFjujGN +++ rm /tmp/tmp.Ru7UnwSlo1 /tmp/tmp.sXhcFjujGN +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ 
backup2:test-password@some-name-rs0-1.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://backup2:test-password@some-name-rs0-1.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Xl3EtB1MAc +++ mktemp ++ local LAST_ERR=/tmp/tmp.NiOHTTUcGk ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://backup2:test-password@some-name-rs0-1.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Xl3EtB1MAc ++ cat /tmp/tmp.NiOHTTUcGk ++ rm /tmp/tmp.Xl3EtB1MAc /tmp/tmp.NiOHTTUcGk ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth backup2:test-password@some-name-rs0-2.some-name-rs0.users-18968 + local uri=backup2:test-password@some-name-rs0-2.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' backup2:test-password@some-name-rs0-2.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=backup2:test-password@some-name-rs0-2.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.VDHnoApX0W ++++ mktemp +++ local LAST_ERR=/tmp/tmp.hbvpRcLULR +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.VDHnoApX0W +++ cat /tmp/tmp.hbvpRcLULR +++ rm /tmp/tmp.VDHnoApX0W /tmp/tmp.hbvpRcLULR +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ backup2:test-password@some-name-rs0-2.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://backup2:test-password@some-name-rs0-2.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oghdv7QWRM +++ mktemp ++ local LAST_ERR=/tmp/tmp.KQ1t12Z71l ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://backup2:test-password@some-name-rs0-2.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.oghdv7QWRM ++ cat /tmp/tmp.KQ1t12Z71l ++ rm /tmp/tmp.oghdv7QWRM /tmp/tmp.KQ1t12Z71l ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace 
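For reference, the auth probe that the trace above repeats for every rs0 member boils down to the snippet below. This is a condensed sketch, not the exact check_mongo_auth helper from the test suite; the client pod name and the users-18968 namespace are the ones from this particular run.

# Sketch of the per-member auth probe seen in this trace (values from this run).
check_mongo_auth() {
    local uri="$1"   # e.g. backup2:test-password@some-name-rs0-0.some-name-rs0.users-18968
    local ping
    ping=$(kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c \
        "printf 'db.runCommand({ ping: 1 }).ok\n' | mongo \"mongodb://${uri}.svc.cluster.local/admin?ssl=false&replicaSet=rs0\" --quiet")
    # the test treats anything other than 1 as an auth failure
    [ "${ping}" == "1" ]
}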
----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + desc 'run backup' + set +o xtrace ----------------------------------------------------------------------------------- run backup ----------------------------------------------------------------------------------- + run_backup minio + local storage=minio + local backup_name=backup-minio + local type=logical + desc 'run backup backup-minio' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio ----------------------------------------------------------------------------------- + kubectl_bin apply -f - + yq eval '.metadata.name = "backup-minio" | .spec.storageName = "minio" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/e2e-tests/users/conf/backup-minio.yml ++ mktemp + local LAST_OUT=/tmp/tmp.vBSyGf8t2S ++ mktemp + local LAST_ERR=/tmp/tmp.Wdaw0llOjz + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.vBSyGf8t2S perconaservermongodbbackup.psmdb.percona.com/backup-minio created + cat /tmp/tmp.Wdaw0llOjz + rm /tmp/tmp.vBSyGf8t2S /tmp/tmp.Wdaw0llOjz + return 0 + wait_backup backup-minio + local backup_name=backup-minio + local target_state=ready + set +o xtrace waiting for backup-minio to reach ready state................. + desc 'change MONGODB_USER_ADMIN_PASSWORD' + set +o xtrace ----------------------------------------------------------------------------------- change MONGODB_USER_ADMIN_PASSWORD ----------------------------------------------------------------------------------- + patch_secret some-users MONGODB_USER_ADMIN_PASSWORD dGVzdC1wYXNzd29yZA== + local secret=some-users + local key=MONGODB_USER_ADMIN_PASSWORD + local value=dGVzdC1wYXNzd29yZA== + kubectl patch secret some-users '-p={"data":{"MONGODB_USER_ADMIN_PASSWORD": "dGVzdC1wYXNzd29yZA=="}}' secret/some-users patched + sleep 25 + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.E3KG94Rycy +++ mktemp ++ local LAST_ERR=/tmp/tmp.A5JG0HlG8g ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.E3KG94Rycy ++ cat /tmp/tmp.A5JG0HlG8g ++ rm /tmp/tmp.E3KG94Rycy /tmp/tmp.A5JG0HlG8g ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + sleep 15 ++ getUserData some-users MONGODB_USER_ADMIN_USER ++ local secretName=some-users ++ local dataKey=MONGODB_USER_ADMIN_USER +++ getSecretData some-users MONGODB_USER_ADMIN_USER +++ local secretName=some-users +++ local dataKey=MONGODB_USER_ADMIN_USER ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_USER_ADMIN_USER}}' ++++ base64 -d +++ local data=userAdmin +++ echo userAdmin ++ urlencode userAdmin ++ uri=userAdmin ++ jq -s -R -r @uri ++ echo -n userAdmin + user=userAdmin + check_mongo_auth userAdmin:test-password@some-name-rs0-0.some-name-rs0.users-18968 + local uri=userAdmin:test-password@some-name-rs0-0.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' 
userAdmin:test-password@some-name-rs0-0.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin:test-password@some-name-rs0-0.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.h294COGbFI ++++ mktemp +++ local LAST_ERR=/tmp/tmp.R4t1plNHAC +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.h294COGbFI +++ cat /tmp/tmp.R4t1plNHAC +++ rm /tmp/tmp.h294COGbFI /tmp/tmp.R4t1plNHAC +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ userAdmin:test-password@some-name-rs0-0.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:test-password@some-name-rs0-0.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GIDXCcApLU +++ mktemp ++ local LAST_ERR=/tmp/tmp.yBAf7ZtOIM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:test-password@some-name-rs0-0.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.GIDXCcApLU ++ cat /tmp/tmp.yBAf7ZtOIM ++ rm /tmp/tmp.GIDXCcApLU /tmp/tmp.yBAf7ZtOIM ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth userAdmin:test-password@some-name-rs0-1.some-name-rs0.users-18968 + local uri=userAdmin:test-password@some-name-rs0-1.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin:test-password@some-name-rs0-1.some-name-rs0.users-18968 mongodb '' --quiet ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin:test-password@some-name-rs0-1.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.NRetLA078N ++++ mktemp +++ local LAST_ERR=/tmp/tmp.iXaCWqjVbP +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.NRetLA078N +++ cat /tmp/tmp.iXaCWqjVbP +++ rm /tmp/tmp.NRetLA078N 
/tmp/tmp.iXaCWqjVbP +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ userAdmin:test-password@some-name-rs0-1.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:test-password@some-name-rs0-1.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VNFedNkFFy +++ mktemp ++ local LAST_ERR=/tmp/tmp.3amwclFPOf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:test-password@some-name-rs0-1.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.VNFedNkFFy ++ cat /tmp/tmp.3amwclFPOf ++ rm /tmp/tmp.VNFedNkFFy /tmp/tmp.3amwclFPOf ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth userAdmin:test-password@some-name-rs0-2.some-name-rs0.users-18968 + local uri=userAdmin:test-password@some-name-rs0-2.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin:test-password@some-name-rs0-2.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin:test-password@some-name-rs0-2.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.A1tnJnAHFT ++++ mktemp +++ local LAST_ERR=/tmp/tmp.KuxrWXr8SO +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.A1tnJnAHFT +++ cat /tmp/tmp.KuxrWXr8SO +++ rm /tmp/tmp.A1tnJnAHFT /tmp/tmp.KuxrWXr8SO +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ userAdmin:test-password@some-name-rs0-2.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:test-password@some-name-rs0-2.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nJu8OYbAfj +++ mktemp ++ local LAST_ERR=/tmp/tmp.QGfbyzDhxM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:test-password@some-name-rs0-2.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.nJu8OYbAfj ++ cat 
/tmp/tmp.QGfbyzDhxM ++ rm /tmp/tmp.nJu8OYbAfj /tmp/tmp.QGfbyzDhxM ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + desc 'change MONGODB_USER_ADMIN_USER' + set +o xtrace ----------------------------------------------------------------------------------- change MONGODB_USER_ADMIN_USER ----------------------------------------------------------------------------------- ++ echo -n userAdmin2 ++ base64 + newnameencrypted=dXNlckFkbWluMg== + patch_secret some-users MONGODB_USER_ADMIN_USER dXNlckFkbWluMg== + local secret=some-users + local key=MONGODB_USER_ADMIN_USER + local value=dXNlckFkbWluMg== + kubectl patch secret some-users '-p={"data":{"MONGODB_USER_ADMIN_USER": "dXNlckFkbWluMg=="}}' secret/some-users patched + sleep 25 + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.h6vegQGKoz +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZWLjWgOqy1 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.h6vegQGKoz ++ cat /tmp/tmp.ZWLjWgOqy1 ++ rm /tmp/tmp.h6vegQGKoz /tmp/tmp.ZWLjWgOqy1 ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + sleep 15 ++ getUserData some-users MONGODB_USER_ADMIN_USER ++ local secretName=some-users ++ local dataKey=MONGODB_USER_ADMIN_USER +++ getSecretData some-users MONGODB_USER_ADMIN_USER +++ local secretName=some-users +++ local dataKey=MONGODB_USER_ADMIN_USER ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_USER_ADMIN_USER}}' ++++ base64 -d +++ local data=userAdmin2 +++ echo userAdmin2 ++ urlencode userAdmin2 ++ uri=userAdmin2 ++ echo -n userAdmin2 ++ jq -s -R -r @uri + user=userAdmin2 + check_mongo_auth userAdmin2:test-password@some-name-rs0-0.some-name-rs0.users-18968 + local uri=userAdmin2:test-password@some-name-rs0-0.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin2:test-password@some-name-rs0-0.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin2:test-password@some-name-rs0-0.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.yIMQEGUFAi ++++ mktemp +++ local LAST_ERR=/tmp/tmp.6LSRtchNa9 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.yIMQEGUFAi +++ cat /tmp/tmp.6LSRtchNa9 +++ rm /tmp/tmp.yIMQEGUFAi /tmp/tmp.6LSRtchNa9 +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ userAdmin2:test-password@some-name-rs0-0.some-name-rs0.users-18968 
== *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin2:test-password@some-name-rs0-0.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LBxTObd45A +++ mktemp ++ local LAST_ERR=/tmp/tmp.4CXmob7r6s ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin2:test-password@some-name-rs0-0.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LBxTObd45A ++ cat /tmp/tmp.4CXmob7r6s ++ rm /tmp/tmp.LBxTObd45A /tmp/tmp.4CXmob7r6s ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth userAdmin2:test-password@some-name-rs0-1.some-name-rs0.users-18968 + local uri=userAdmin2:test-password@some-name-rs0-1.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin2:test-password@some-name-rs0-1.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin2:test-password@some-name-rs0-1.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.1DMyvcAszM ++++ mktemp +++ local LAST_ERR=/tmp/tmp.ZSkahRdg53 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.1DMyvcAszM +++ cat /tmp/tmp.ZSkahRdg53 +++ rm /tmp/tmp.1DMyvcAszM /tmp/tmp.ZSkahRdg53 +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ userAdmin2:test-password@some-name-rs0-1.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin2:test-password@some-name-rs0-1.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8e1HBkjoXZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.nwXrOFBIAZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin2:test-password@some-name-rs0-1.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8e1HBkjoXZ ++ cat /tmp/tmp.nwXrOFBIAZ ++ rm /tmp/tmp.8e1HBkjoXZ /tmp/tmp.nwXrOFBIAZ ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace 
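The credential rotations exercised in this run (most recently MONGODB_USER_ADMIN_USER to userAdmin2) all follow the same patch_secret pattern: base64-encode the new value, patch the some-users Secret, then give the operator time to reconcile. A minimal sketch with the values and sleep from this trace; it is not the exact helper from the test functions.

# Sketch of the secret rotation step repeated throughout this test.
key=MONGODB_USER_ADMIN_USER
value="$(echo -n 'userAdmin2' | base64)"    # dXNlckFkbWluMg==
kubectl patch secret some-users -p="{\"data\":{\"${key}\": \"${value}\"}}"
sleep 25                                    # let the operator pick up the change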
----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth userAdmin2:test-password@some-name-rs0-2.some-name-rs0.users-18968 + local uri=userAdmin2:test-password@some-name-rs0-2.some-name-rs0.users-18968 ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin2:test-password@some-name-rs0-2.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin2:test-password@some-name-rs0-2.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.75OulRtDqh ++++ mktemp +++ local LAST_ERR=/tmp/tmp.S21sydRStL +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.75OulRtDqh +++ cat /tmp/tmp.S21sydRStL +++ rm /tmp/tmp.75OulRtDqh /tmp/tmp.S21sydRStL +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ userAdmin2:test-password@some-name-rs0-2.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin2:test-password@some-name-rs0-2.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QpjkOHO13q +++ mktemp ++ local LAST_ERR=/tmp/tmp.HAC1K7GUzl ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin2:test-password@some-name-rs0-2.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.QpjkOHO13q ++ cat /tmp/tmp.HAC1K7GUzl ++ rm /tmp/tmp.QpjkOHO13q /tmp/tmp.HAC1K7GUzl ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + desc 'change MONGODB_CLUSTER_ADMIN_PASSWORD' + set +o xtrace ----------------------------------------------------------------------------------- change MONGODB_CLUSTER_ADMIN_PASSWORD ----------------------------------------------------------------------------------- + patch_secret some-users MONGODB_CLUSTER_ADMIN_PASSWORD dGVzdC1wYXNzd29yZA== + local secret=some-users + local key=MONGODB_CLUSTER_ADMIN_PASSWORD + local value=dGVzdC1wYXNzd29yZA== + kubectl patch secret some-users '-p={"data":{"MONGODB_CLUSTER_ADMIN_PASSWORD": "dGVzdC1wYXNzd29yZA=="}}' secret/some-users patched + sleep 25 + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name 
-o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0Hvab6IGCX +++ mktemp ++ local LAST_ERR=/tmp/tmp.5gx49tCl9A ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.0Hvab6IGCX ++ cat /tmp/tmp.5gx49tCl9A ++ rm /tmp/tmp.0Hvab6IGCX /tmp/tmp.5gx49tCl9A ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + sleep 15 ++ getUserData some-users MONGODB_CLUSTER_ADMIN_USER ++ local secretName=some-users ++ local dataKey=MONGODB_CLUSTER_ADMIN_USER +++ getSecretData some-users MONGODB_CLUSTER_ADMIN_USER +++ local secretName=some-users +++ local dataKey=MONGODB_CLUSTER_ADMIN_USER ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_CLUSTER_ADMIN_USER}}' ++++ base64 -d +++ local data=clusterAdmin +++ echo clusterAdmin ++ urlencode clusterAdmin ++ uri=clusterAdmin ++ echo -n clusterAdmin ++ jq -s -R -r @uri + user=clusterAdmin + check_mongo_auth clusterAdmin:test-password@some-name-rs0-0.some-name-rs0.users-18968 + local uri=clusterAdmin:test-password@some-name-rs0-0.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' clusterAdmin:test-password@some-name-rs0-0.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=clusterAdmin:test-password@some-name-rs0-0.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.PeyS6cdMUJ ++++ mktemp +++ local LAST_ERR=/tmp/tmp.gsoZIdNLCk +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.PeyS6cdMUJ +++ cat /tmp/tmp.gsoZIdNLCk +++ rm /tmp/tmp.PeyS6cdMUJ /tmp/tmp.gsoZIdNLCk +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ clusterAdmin:test-password@some-name-rs0-0.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://clusterAdmin:test-password@some-name-rs0-0.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.G036uT7z6l +++ mktemp ++ local LAST_ERR=/tmp/tmp.Y3mamdOVLQ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://clusterAdmin:test-password@some-name-rs0-0.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.G036uT7z6l ++ cat /tmp/tmp.Y3mamdOVLQ ++ rm /tmp/tmp.G036uT7z6l /tmp/tmp.Y3mamdOVLQ ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return 
----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth clusterAdmin:test-password@some-name-rs0-1.some-name-rs0.users-18968 + local uri=clusterAdmin:test-password@some-name-rs0-1.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' clusterAdmin:test-password@some-name-rs0-1.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=clusterAdmin:test-password@some-name-rs0-1.some-name-rs0.users-18968 ++ local driver=mongodb ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.I1vbGqwe9c ++++ mktemp +++ local LAST_ERR=/tmp/tmp.KpDEB5rwV9 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.I1vbGqwe9c +++ cat /tmp/tmp.KpDEB5rwV9 +++ rm /tmp/tmp.I1vbGqwe9c /tmp/tmp.KpDEB5rwV9 +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ clusterAdmin:test-password@some-name-rs0-1.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://clusterAdmin:test-password@some-name-rs0-1.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.js6uQ4NQTO +++ mktemp ++ local LAST_ERR=/tmp/tmp.T32cypFW3c ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://clusterAdmin:test-password@some-name-rs0-1.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.js6uQ4NQTO ++ cat /tmp/tmp.T32cypFW3c ++ rm /tmp/tmp.js6uQ4NQTO /tmp/tmp.T32cypFW3c ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth clusterAdmin:test-password@some-name-rs0-2.some-name-rs0.users-18968 + local uri=clusterAdmin:test-password@some-name-rs0-2.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' clusterAdmin:test-password@some-name-rs0-2.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=clusterAdmin:test-password@some-name-rs0-2.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.eYTIB5Z9WK ++++ mktemp +++ local LAST_ERR=/tmp/tmp.rRZGO891G7 +++ 
local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.eYTIB5Z9WK +++ cat /tmp/tmp.rRZGO891G7 +++ rm /tmp/tmp.eYTIB5Z9WK /tmp/tmp.rRZGO891G7 +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ clusterAdmin:test-password@some-name-rs0-2.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://clusterAdmin:test-password@some-name-rs0-2.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Xgwvz7P7hV +++ mktemp ++ local LAST_ERR=/tmp/tmp.z1YY6ZgSfc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://clusterAdmin:test-password@some-name-rs0-2.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Xgwvz7P7hV ++ cat /tmp/tmp.z1YY6ZgSfc ++ rm /tmp/tmp.Xgwvz7P7hV /tmp/tmp.z1YY6ZgSfc ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + desc 'change MONGODB_CLUSTER_MONITOR_PASSWORD' + set +o xtrace ----------------------------------------------------------------------------------- change MONGODB_CLUSTER_MONITOR_PASSWORD ----------------------------------------------------------------------------------- + patch_secret some-users MONGODB_CLUSTER_MONITOR_PASSWORD dGVzdC1wYXNzd29yZA== + local secret=some-users + local key=MONGODB_CLUSTER_MONITOR_PASSWORD + local value=dGVzdC1wYXNzd29yZA== + kubectl patch secret some-users '-p={"data":{"MONGODB_CLUSTER_MONITOR_PASSWORD": "dGVzdC1wYXNzd29yZA=="}}' secret/some-users patched + sleep 25 + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UxYsOQSmla +++ mktemp ++ local LAST_ERR=/tmp/tmp.vPyJSFzkTd ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.UxYsOQSmla ++ cat /tmp/tmp.vPyJSFzkTd ++ rm /tmp/tmp.UxYsOQSmla /tmp/tmp.vPyJSFzkTd ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + sleep 15 ++ getUserData some-users MONGODB_CLUSTER_MONITOR_USER ++ local secretName=some-users ++ local dataKey=MONGODB_CLUSTER_MONITOR_USER +++ getSecretData some-users MONGODB_CLUSTER_MONITOR_USER +++ local secretName=some-users +++ local dataKey=MONGODB_CLUSTER_MONITOR_USER ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_CLUSTER_MONITOR_USER}}' ++++ base64 -d +++ local data=clusterMonitor +++ echo clusterMonitor ++ urlencode clusterMonitor ++ uri=clusterMonitor ++ echo -n clusterMonitor ++ jq -s -R -r @uri + user=clusterMonitor + check_mongo_auth 
clusterMonitor:test-password@some-name-rs0-0.some-name-rs0.users-18968 + local uri=clusterMonitor:test-password@some-name-rs0-0.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' clusterMonitor:test-password@some-name-rs0-0.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=clusterMonitor:test-password@some-name-rs0-0.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.7d5u617r63 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.IqrSjQa8lX +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.7d5u617r63 +++ cat /tmp/tmp.IqrSjQa8lX +++ rm /tmp/tmp.7d5u617r63 /tmp/tmp.IqrSjQa8lX +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ clusterMonitor:test-password@some-name-rs0-0.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://clusterMonitor:test-password@some-name-rs0-0.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CzwHzWWkN0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.DpaQ0VqoP9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://clusterMonitor:test-password@some-name-rs0-0.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.CzwHzWWkN0 ++ cat /tmp/tmp.DpaQ0VqoP9 ++ rm /tmp/tmp.CzwHzWWkN0 /tmp/tmp.DpaQ0VqoP9 ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth clusterMonitor:test-password@some-name-rs0-1.some-name-rs0.users-18968 + local uri=clusterMonitor:test-password@some-name-rs0-1.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' clusterMonitor:test-password@some-name-rs0-1.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=clusterMonitor:test-password@some-name-rs0-1.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.iCVmm6Irdl ++++ mktemp +++ local LAST_ERR=/tmp/tmp.ShnKko1Yxw +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ 
kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.iCVmm6Irdl +++ cat /tmp/tmp.ShnKko1Yxw +++ rm /tmp/tmp.iCVmm6Irdl /tmp/tmp.ShnKko1Yxw +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ clusterMonitor:test-password@some-name-rs0-1.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://clusterMonitor:test-password@some-name-rs0-1.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Uv2UDJayLV +++ mktemp ++ local LAST_ERR=/tmp/tmp.DaGJFVYGk8 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://clusterMonitor:test-password@some-name-rs0-1.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Uv2UDJayLV ++ cat /tmp/tmp.DaGJFVYGk8 ++ rm /tmp/tmp.Uv2UDJayLV /tmp/tmp.DaGJFVYGk8 ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth clusterMonitor:test-password@some-name-rs0-2.some-name-rs0.users-18968 + local uri=clusterMonitor:test-password@some-name-rs0-2.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' clusterMonitor:test-password@some-name-rs0-2.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=clusterMonitor:test-password@some-name-rs0-2.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ local LAST_OUT=/tmp/tmp.eeYOi4hkzR ++++ mktemp +++ local LAST_ERR=/tmp/tmp.4LI757L6M6 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.eeYOi4hkzR +++ cat /tmp/tmp.4LI757L6M6 +++ rm /tmp/tmp.eeYOi4hkzR /tmp/tmp.4LI757L6M6 +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ clusterMonitor:test-password@some-name-rs0-2.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://clusterMonitor:test-password@some-name-rs0-2.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pUM7OxqMqC +++ mktemp ++ local LAST_ERR=/tmp/tmp.yrzJBtcTKN ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf 
'\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://clusterMonitor:test-password@some-name-rs0-2.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.pUM7OxqMqC ++ cat /tmp/tmp.yrzJBtcTKN ++ rm /tmp/tmp.pUM7OxqMqC /tmp/tmp.yrzJBtcTKN ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + desc 'remove users secret' + set +o xtrace ----------------------------------------------------------------------------------- remove users secret ----------------------------------------------------------------------------------- + kubectl_bin delete secret some-users ++ mktemp + local LAST_OUT=/tmp/tmp.dM04lt8G2r ++ mktemp + local LAST_ERR=/tmp/tmp.e3jMrRFT1Z + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete secret some-users + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.dM04lt8G2r secret "some-users" deleted + cat /tmp/tmp.e3jMrRFT1Z + rm /tmp/tmp.dM04lt8G2r /tmp/tmp.e3jMrRFT1Z + return 0 + sleep 35 + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.C5iHvk4K8w +++ mktemp ++ local LAST_ERR=/tmp/tmp.dKgiN39vSJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.C5iHvk4K8w ++ cat /tmp/tmp.dKgiN39vSJ ++ rm /tmp/tmp.C5iHvk4K8w /tmp/tmp.dKgiN39vSJ ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + sleep 15 ++ getUserData some-users MONGODB_USER_ADMIN_USER ++ local secretName=some-users ++ local dataKey=MONGODB_USER_ADMIN_USER +++ getSecretData some-users MONGODB_USER_ADMIN_USER +++ local secretName=some-users +++ local dataKey=MONGODB_USER_ADMIN_USER ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_USER_ADMIN_USER}}' ++++ base64 -d +++ local data=userAdmin +++ echo userAdmin ++ urlencode userAdmin ++ uri=userAdmin ++ jq -s -R -r @uri ++ echo -n userAdmin + user=userAdmin ++ getUserData some-users MONGODB_USER_ADMIN_PASSWORD ++ local secretName=some-users ++ local dataKey=MONGODB_USER_ADMIN_PASSWORD +++ getSecretData some-users MONGODB_USER_ADMIN_PASSWORD +++ local secretName=some-users +++ local dataKey=MONGODB_USER_ADMIN_PASSWORD ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_USER_ADMIN_PASSWORD}}' ++++ base64 -d +++ local data=O34UgMn9Etxv0IY8 +++ echo O34UgMn9Etxv0IY8 ++ urlencode O34UgMn9Etxv0IY8 ++ uri=O34UgMn9Etxv0IY8 ++ jq -s -R -r @uri ++ echo -n O34UgMn9Etxv0IY8 + pass=O34UgMn9Etxv0IY8 + check_mongo_auth userAdmin:O34UgMn9Etxv0IY8@some-name-rs0-0.some-name-rs0.users-18968 + local uri=userAdmin:O34UgMn9Etxv0IY8@some-name-rs0-0.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin:O34UgMn9Etxv0IY8@some-name-rs0-0.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin:O34UgMn9Etxv0IY8@some-name-rs0-0.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ 
egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.R3OFPT32BE ++++ mktemp +++ local LAST_ERR=/tmp/tmp.5HUtfKhJLK +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.R3OFPT32BE +++ cat /tmp/tmp.5HUtfKhJLK +++ rm /tmp/tmp.R3OFPT32BE /tmp/tmp.5HUtfKhJLK +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ userAdmin:O34UgMn9Etxv0IY8@some-name-rs0-0.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:O34UgMn9Etxv0IY8@some-name-rs0-0.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5XPLOQlKeY +++ mktemp ++ local LAST_ERR=/tmp/tmp.PZ0RZ8bmtT ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:O34UgMn9Etxv0IY8@some-name-rs0-0.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.5XPLOQlKeY ++ cat /tmp/tmp.PZ0RZ8bmtT ++ rm /tmp/tmp.5XPLOQlKeY /tmp/tmp.PZ0RZ8bmtT ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth userAdmin:O34UgMn9Etxv0IY8@some-name-rs0-1.some-name-rs0.users-18968 + local uri=userAdmin:O34UgMn9Etxv0IY8@some-name-rs0-1.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin:O34UgMn9Etxv0IY8@some-name-rs0-1.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin:O34UgMn9Etxv0IY8@some-name-rs0-1.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.RdLcOYmI9H ++++ mktemp +++ local LAST_ERR=/tmp/tmp.H2aYEADekq +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.RdLcOYmI9H +++ cat /tmp/tmp.H2aYEADekq +++ rm /tmp/tmp.RdLcOYmI9H /tmp/tmp.H2aYEADekq +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ userAdmin:O34UgMn9Etxv0IY8@some-name-rs0-1.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec 
psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:O34UgMn9Etxv0IY8@some-name-rs0-1.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fzNLESPJFL +++ mktemp ++ local LAST_ERR=/tmp/tmp.4suunPXHp9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:O34UgMn9Etxv0IY8@some-name-rs0-1.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.fzNLESPJFL ++ cat /tmp/tmp.4suunPXHp9 ++ rm /tmp/tmp.fzNLESPJFL /tmp/tmp.4suunPXHp9 ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth userAdmin:O34UgMn9Etxv0IY8@some-name-rs0-2.some-name-rs0.users-18968 + local uri=userAdmin:O34UgMn9Etxv0IY8@some-name-rs0-2.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin:O34UgMn9Etxv0IY8@some-name-rs0-2.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin:O34UgMn9Etxv0IY8@some-name-rs0-2.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.jS4YgSdrEm ++++ mktemp +++ local LAST_ERR=/tmp/tmp.5N9rEp17KN +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.jS4YgSdrEm +++ cat /tmp/tmp.5N9rEp17KN +++ rm /tmp/tmp.jS4YgSdrEm /tmp/tmp.5N9rEp17KN +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ userAdmin:O34UgMn9Etxv0IY8@some-name-rs0-2.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:O34UgMn9Etxv0IY8@some-name-rs0-2.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kFVH9tovts +++ mktemp ++ local LAST_ERR=/tmp/tmp.VPYH12ybCi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:O34UgMn9Etxv0IY8@some-name-rs0-2.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.kFVH9tovts ++ cat /tmp/tmp.VPYH12ybCi ++ rm /tmp/tmp.kFVH9tovts /tmp/tmp.VPYH12ybCi ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return 
----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + desc 'update all users' + set +o xtrace ----------------------------------------------------------------------------------- update all users ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.EkvKXAc0He ++ mktemp + local LAST_ERR=/tmp/tmp.J98ETJkEnw + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.EkvKXAc0He secret/some-users configured + cat /tmp/tmp.J98ETJkEnw Warning: resource secrets/some-users is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. + rm /tmp/tmp.EkvKXAc0He /tmp/tmp.J98ETJkEnw + return 0 + sleep 35 + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JX5Hdijirj +++ mktemp ++ local LAST_ERR=/tmp/tmp.LLczvYEuna ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JX5Hdijirj ++ cat /tmp/tmp.LLczvYEuna ++ rm /tmp/tmp.JX5Hdijirj /tmp/tmp.LLczvYEuna ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.L0NJfSYXgL +++ mktemp ++ local LAST_ERR=/tmp/tmp.2Mk5Zd80gX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.L0NJfSYXgL ++ cat /tmp/tmp.2Mk5Zd80gX ++ rm /tmp/tmp.L0NJfSYXgL /tmp/tmp.2Mk5Zd80gX ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . 
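The readiness loop that is retrying here (the cluster state came back as error right after the secrets were re-applied) is essentially the poll sketched below. This is a condensed reading of wait_cluster_consistency as it behaves in this trace, with the 32-retry limit and sleep intervals taken from the log, not the authoritative implementation.

# Sketch of the readiness polling seen above.
retry=0
sleep 7
until [ "$(kubectl get psmdb some-name -o 'jsonpath={.status.state}')" == "ready" ]; do
    retry=$((retry + 1))
    if [ "${retry}" -ge 32 ]; then
        echo "cluster some-name did not become ready in time"
        exit 1
    fi
    echo -n .
    sleep 10
done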
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VV8CPrIiam +++ mktemp ++ local LAST_ERR=/tmp/tmp.x7bdmc6qtK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.VV8CPrIiam ++ cat /tmp/tmp.x7bdmc6qtK ++ rm /tmp/tmp.VV8CPrIiam /tmp/tmp.x7bdmc6qtK ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + sleep 15 ++ getUserData some-users MONGODB_USER_ADMIN_USER ++ local secretName=some-users ++ local dataKey=MONGODB_USER_ADMIN_USER +++ getSecretData some-users MONGODB_USER_ADMIN_USER +++ local secretName=some-users +++ local dataKey=MONGODB_USER_ADMIN_USER ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_USER_ADMIN_USER}}' ++++ base64 -d +++ local data=userAdmin +++ echo userAdmin ++ urlencode userAdmin ++ uri=userAdmin ++ echo -n userAdmin ++ jq -s -R -r @uri + user=userAdmin ++ getUserData some-users MONGODB_USER_ADMIN_PASSWORD ++ local secretName=some-users ++ local dataKey=MONGODB_USER_ADMIN_PASSWORD +++ getSecretData some-users MONGODB_USER_ADMIN_PASSWORD +++ local secretName=some-users +++ local dataKey=MONGODB_USER_ADMIN_PASSWORD ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_USER_ADMIN_PASSWORD}}' ++++ base64 -d +++ local data=userAdmin123456 +++ echo userAdmin123456 ++ urlencode userAdmin123456 ++ uri=userAdmin123456 ++ jq -s -R -r @uri ++ echo -n userAdmin123456 + pass=userAdmin123456 + check_mongo_auth userAdmin:userAdmin123456@some-name-rs0-0.some-name-rs0.users-18968 + local uri=userAdmin:userAdmin123456@some-name-rs0-0.some-name-rs0.users-18968 ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin:userAdmin123456@some-name-rs0-0.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin:userAdmin123456@some-name-rs0-0.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.OelWQxyBQS ++++ mktemp +++ local LAST_ERR=/tmp/tmp.LtnyZMuIO0 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.OelWQxyBQS +++ cat /tmp/tmp.LtnyZMuIO0 +++ rm /tmp/tmp.OelWQxyBQS /tmp/tmp.LtnyZMuIO0 +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ userAdmin:userAdmin123456@some-name-rs0-0.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-rs0-0.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.STPUBmVHSs +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZWbq3VP1LM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 
}).ok\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-rs0-0.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.STPUBmVHSs ++ cat /tmp/tmp.ZWbq3VP1LM ++ rm /tmp/tmp.STPUBmVHSs /tmp/tmp.ZWbq3VP1LM ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth userAdmin:userAdmin123456@some-name-rs0-1.some-name-rs0.users-18968 + local uri=userAdmin:userAdmin123456@some-name-rs0-1.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin:userAdmin123456@some-name-rs0-1.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin:userAdmin123456@some-name-rs0-1.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.vsPHCjSbH9 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.NYVGLXfmZ1 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.vsPHCjSbH9 +++ cat /tmp/tmp.NYVGLXfmZ1 +++ rm /tmp/tmp.vsPHCjSbH9 /tmp/tmp.NYVGLXfmZ1 +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ userAdmin:userAdmin123456@some-name-rs0-1.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-rs0-1.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VXU7K3siGv +++ mktemp ++ local LAST_ERR=/tmp/tmp.zohcpbq35c ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-rs0-1.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.VXU7K3siGv ++ cat /tmp/tmp.zohcpbq35c ++ rm /tmp/tmp.VXU7K3siGv /tmp/tmp.zohcpbq35c ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth userAdmin:userAdmin123456@some-name-rs0-2.some-name-rs0.users-18968 + local uri=userAdmin:userAdmin123456@some-name-rs0-2.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin:userAdmin123456@some-name-rs0-2.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local 
uri=userAdmin:userAdmin123456@some-name-rs0-2.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.0VUjIJb0lH ++++ mktemp +++ local LAST_ERR=/tmp/tmp.kZBieYdz44 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.0VUjIJb0lH +++ cat /tmp/tmp.kZBieYdz44 +++ rm /tmp/tmp.0VUjIJb0lH /tmp/tmp.kZBieYdz44 +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ userAdmin:userAdmin123456@some-name-rs0-2.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-rs0-2.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JllDxZRGAP +++ mktemp ++ local LAST_ERR=/tmp/tmp.hmloJDz78E ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-rs0-2.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JllDxZRGAP ++ cat /tmp/tmp.hmloJDz78E ++ rm /tmp/tmp.JllDxZRGAP /tmp/tmp.hmloJDz78E ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + newmonitorusername=newmonitorusername + desc 'update monitor user username' + set +o xtrace ----------------------------------------------------------------------------------- update monitor user username ----------------------------------------------------------------------------------- ++ echo -n newmonitorusername ++ base64 + patch_secret some-users MONGODB_CLUSTER_MONITOR_USER bmV3bW9uaXRvcnVzZXJuYW1l + local secret=some-users + local key=MONGODB_CLUSTER_MONITOR_USER + local value=bmV3bW9uaXRvcnVzZXJuYW1l + kubectl patch secret some-users '-p={"data":{"MONGODB_CLUSTER_MONITOR_USER": "bmV3bW9uaXRvcnVzZXJuYW1l"}}' secret/some-users patched + sleep 35 + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CA1Ed5Uvi2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.jvnxdyaWVS ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.CA1Ed5Uvi2 ++ cat /tmp/tmp.jvnxdyaWVS ++ rm /tmp/tmp.CA1Ed5Uvi2 /tmp/tmp.jvnxdyaWVS ++ return 0 + [[ ready == \r\e\a\d\y ]] 
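Note (editor): the step above rotates the MONGODB_CLUSTER_MONITOR_USER key of the some-users Secret and then polls the custom resource until .status.state reports "ready" again. A minimal sketch of that pattern, reusing only the resource names that appear in this log (some-users, some-name); any other values are illustrative, not taken from the test suite:

    # base64-encode the new value the way the test does (echo -n avoids a trailing newline)
    value=$(echo -n newmonitorusername | base64)
    # merge-patch a single key of the existing Secret; other keys stay untouched
    kubectl patch secret some-users -p "{\"data\":{\"MONGODB_CLUSTER_MONITOR_USER\": \"${value}\"}}"
    # wait until the operator reconciles the change and reports the cluster ready again
    until [ "$(kubectl get psmdb some-name -o jsonpath='{.status.state}')" = "ready" ]; do
        sleep 10
    done

The log below then re-reads the user and password from the Secret and re-runs the db.runCommand({ ping: 1 }) auth check against each replica set member with the new credentials.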
+ echo + sleep 15 ++ getUserData some-users MONGODB_CLUSTER_MONITOR_USER ++ local secretName=some-users ++ local dataKey=MONGODB_CLUSTER_MONITOR_USER +++ getSecretData some-users MONGODB_CLUSTER_MONITOR_USER +++ local secretName=some-users +++ local dataKey=MONGODB_CLUSTER_MONITOR_USER ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_CLUSTER_MONITOR_USER}}' ++++ base64 -d +++ local data=newmonitorusername +++ echo newmonitorusername ++ urlencode newmonitorusername ++ uri=newmonitorusername ++ jq -s -R -r @uri ++ echo -n newmonitorusername + user=newmonitorusername ++ getUserData some-users MONGODB_CLUSTER_MONITOR_PASSWORD ++ local secretName=some-users ++ local dataKey=MONGODB_CLUSTER_MONITOR_PASSWORD +++ getSecretData some-users MONGODB_CLUSTER_MONITOR_PASSWORD +++ local secretName=some-users +++ local dataKey=MONGODB_CLUSTER_MONITOR_PASSWORD ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_CLUSTER_MONITOR_PASSWORD}}' ++++ base64 -d +++ local data=clusterMonitor123456 +++ echo clusterMonitor123456 ++ urlencode clusterMonitor123456 ++ uri=clusterMonitor123456 ++ echo -n clusterMonitor123456 ++ jq -s -R -r @uri + pass=clusterMonitor123456 + check_mongo_auth newmonitorusername:clusterMonitor123456@some-name-rs0-0.some-name-rs0.users-18968 + local uri=newmonitorusername:clusterMonitor123456@some-name-rs0-0.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' newmonitorusername:clusterMonitor123456@some-name-rs0-0.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=newmonitorusername:clusterMonitor123456@some-name-rs0-0.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.rWWMhnPZLK ++++ mktemp +++ local LAST_ERR=/tmp/tmp.mt6X2VpWA9 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.rWWMhnPZLK +++ cat /tmp/tmp.mt6X2VpWA9 +++ rm /tmp/tmp.rWWMhnPZLK /tmp/tmp.mt6X2VpWA9 +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ newmonitorusername:clusterMonitor123456@some-name-rs0-0.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://newmonitorusername:clusterMonitor123456@some-name-rs0-0.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iVmHZmY9b7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.PzOASd5G68 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://newmonitorusername:clusterMonitor123456@some-name-rs0-0.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.iVmHZmY9b7 ++ cat /tmp/tmp.PzOASd5G68 ++ rm 
/tmp/tmp.iVmHZmY9b7 /tmp/tmp.PzOASd5G68 ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth newmonitorusername:clusterMonitor123456@some-name-rs0-1.some-name-rs0.users-18968 + local uri=newmonitorusername:clusterMonitor123456@some-name-rs0-1.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' newmonitorusername:clusterMonitor123456@some-name-rs0-1.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=newmonitorusername:clusterMonitor123456@some-name-rs0-1.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.Mi5fjQvngW ++++ mktemp +++ local LAST_ERR=/tmp/tmp.5zn1Oy1ToW +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.Mi5fjQvngW +++ cat /tmp/tmp.5zn1Oy1ToW +++ rm /tmp/tmp.Mi5fjQvngW /tmp/tmp.5zn1Oy1ToW +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ newmonitorusername:clusterMonitor123456@some-name-rs0-1.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://newmonitorusername:clusterMonitor123456@some-name-rs0-1.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gYSatmJWZr +++ mktemp ++ local LAST_ERR=/tmp/tmp.6ym3sqdWVJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://newmonitorusername:clusterMonitor123456@some-name-rs0-1.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.gYSatmJWZr ++ cat /tmp/tmp.6ym3sqdWVJ ++ rm /tmp/tmp.gYSatmJWZr /tmp/tmp.6ym3sqdWVJ ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth newmonitorusername:clusterMonitor123456@some-name-rs0-2.some-name-rs0.users-18968 + local uri=newmonitorusername:clusterMonitor123456@some-name-rs0-2.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' newmonitorusername:clusterMonitor123456@some-name-rs0-2.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=newmonitorusername:clusterMonitor123456@some-name-rs0-2.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history 
file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.cfoEsgBAaR ++++ mktemp +++ local LAST_ERR=/tmp/tmp.cz0btphlua +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.cfoEsgBAaR +++ cat /tmp/tmp.cz0btphlua +++ rm /tmp/tmp.cfoEsgBAaR /tmp/tmp.cz0btphlua +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ newmonitorusername:clusterMonitor123456@some-name-rs0-2.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://newmonitorusername:clusterMonitor123456@some-name-rs0-2.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PBylSKKG9C +++ mktemp ++ local LAST_ERR=/tmp/tmp.nWQj6UskH8 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://newmonitorusername:clusterMonitor123456@some-name-rs0-2.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.PBylSKKG9C ++ cat /tmp/tmp.nWQj6UskH8 ++ rm /tmp/tmp.PBylSKKG9C /tmp/tmp.nWQj6UskH8 ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + desc 'secret without userAdmin' + set +o xtrace ----------------------------------------------------------------------------------- secret without userAdmin ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/e2e-tests/users/conf/secrets-2.yml ++ mktemp + local LAST_OUT=/tmp/tmp.3P56CWZ3Lg ++ mktemp + local LAST_ERR=/tmp/tmp.sgpBfCkli2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/e2e-tests/users/conf/secrets-2.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.3P56CWZ3Lg secret/some-users configured + cat /tmp/tmp.sgpBfCkli2 + rm /tmp/tmp.3P56CWZ3Lg /tmp/tmp.sgpBfCkli2 + return 0 + sleep 25 + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kI8XdDfyER +++ mktemp ++ local LAST_ERR=/tmp/tmp.uH6OXKYPNx ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.kI8XdDfyER ++ cat /tmp/tmp.uH6OXKYPNx ++ rm /tmp/tmp.kI8XdDfyER /tmp/tmp.uH6OXKYPNx 
++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EAngZn68te +++ mktemp ++ local LAST_ERR=/tmp/tmp.FAMR3FQqSr ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.EAngZn68te ++ cat /tmp/tmp.FAMR3FQqSr ++ rm /tmp/tmp.EAngZn68te /tmp/tmp.FAMR3FQqSr ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QD4D0lHWAq +++ mktemp ++ local LAST_ERR=/tmp/tmp.IfLrRR66aK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.QD4D0lHWAq ++ cat /tmp/tmp.IfLrRR66aK ++ rm /tmp/tmp.QD4D0lHWAq /tmp/tmp.IfLrRR66aK ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dM8Fxor7uz +++ mktemp ++ local LAST_ERR=/tmp/tmp.ih0hguxEUF ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dM8Fxor7uz ++ cat /tmp/tmp.ih0hguxEUF ++ rm /tmp/tmp.dM8Fxor7uz /tmp/tmp.ih0hguxEUF ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DT8gSnt7m6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.UFxoKSjIzZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.DT8gSnt7m6 ++ cat /tmp/tmp.UFxoKSjIzZ ++ rm /tmp/tmp.DT8gSnt7m6 /tmp/tmp.UFxoKSjIzZ ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YsIAfRtDTX +++ mktemp ++ local LAST_ERR=/tmp/tmp.wlfoGAVshB ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.YsIAfRtDTX ++ cat /tmp/tmp.wlfoGAVshB ++ rm /tmp/tmp.YsIAfRtDTX /tmp/tmp.wlfoGAVshB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YCm7kBbUQE +++ mktemp ++ local LAST_ERR=/tmp/tmp.T5DAeYvVk5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.YCm7kBbUQE ++ cat /tmp/tmp.T5DAeYvVk5 ++ rm /tmp/tmp.YCm7kBbUQE /tmp/tmp.T5DAeYvVk5 ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + sleep 15 ++ getUserData some-users MONGODB_USER_ADMIN_USER ++ local secretName=some-users ++ local dataKey=MONGODB_USER_ADMIN_USER +++ getSecretData some-users MONGODB_USER_ADMIN_USER +++ local secretName=some-users +++ local dataKey=MONGODB_USER_ADMIN_USER ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_USER_ADMIN_USER}}' ++++ base64 -d +++ local data=userAdmin +++ echo userAdmin ++ urlencode userAdmin ++ uri=userAdmin ++ echo -n userAdmin ++ jq -s -R -r @uri + user=userAdmin ++ getUserData some-users MONGODB_USER_ADMIN_PASSWORD ++ local secretName=some-users ++ local dataKey=MONGODB_USER_ADMIN_PASSWORD +++ getSecretData some-users MONGODB_USER_ADMIN_PASSWORD +++ local secretName=some-users +++ local dataKey=MONGODB_USER_ADMIN_PASSWORD ++++ base64 -d ++++ kubectl get secrets/some-users '--template={{.data.MONGODB_USER_ADMIN_PASSWORD}}' +++ local data=PeZo15akg90kYC0g +++ echo PeZo15akg90kYC0g ++ urlencode PeZo15akg90kYC0g ++ uri=PeZo15akg90kYC0g ++ echo -n PeZo15akg90kYC0g ++ jq -s -R -r @uri + pass=PeZo15akg90kYC0g + check_mongo_auth userAdmin:PeZo15akg90kYC0g@some-name-rs0-0.some-name-rs0.users-18968 + local uri=userAdmin:PeZo15akg90kYC0g@some-name-rs0-0.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin:PeZo15akg90kYC0g@some-name-rs0-0.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin:PeZo15akg90kYC0g@some-name-rs0-0.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.nRRzzobgIQ ++++ mktemp +++ local LAST_ERR=/tmp/tmp.1X8d893oA0 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.nRRzzobgIQ +++ cat /tmp/tmp.1X8d893oA0 +++ rm /tmp/tmp.nRRzzobgIQ /tmp/tmp.1X8d893oA0 +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ userAdmin:PeZo15akg90kYC0g@some-name-rs0-0.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:PeZo15akg90kYC0g@some-name-rs0-0.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5nqZKVPCSU +++ mktemp ++ local LAST_ERR=/tmp/tmp.EogpYrVreX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf 
'\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:PeZo15akg90kYC0g@some-name-rs0-0.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.5nqZKVPCSU ++ cat /tmp/tmp.EogpYrVreX ++ rm /tmp/tmp.5nqZKVPCSU /tmp/tmp.EogpYrVreX ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth userAdmin:PeZo15akg90kYC0g@some-name-rs0-1.some-name-rs0.users-18968 + local uri=userAdmin:PeZo15akg90kYC0g@some-name-rs0-1.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin:PeZo15akg90kYC0g@some-name-rs0-1.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local uri=userAdmin:PeZo15akg90kYC0g@some-name-rs0-1.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.7zLpxLi5R9 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.EPdUicfQIS +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.7zLpxLi5R9 +++ cat /tmp/tmp.EPdUicfQIS +++ rm /tmp/tmp.7zLpxLi5R9 /tmp/tmp.EPdUicfQIS +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ userAdmin:PeZo15akg90kYC0g@some-name-rs0-1.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:PeZo15akg90kYC0g@some-name-rs0-1.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.50Qxk6fLib +++ mktemp ++ local LAST_ERR=/tmp/tmp.7DyM7hxEXc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:PeZo15akg90kYC0g@some-name-rs0-1.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.50Qxk6fLib ++ cat /tmp/tmp.7DyM7hxEXc ++ rm /tmp/tmp.50Qxk6fLib /tmp/tmp.7DyM7hxEXc ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + check_mongo_auth userAdmin:PeZo15akg90kYC0g@some-name-rs0-2.some-name-rs0.users-18968 + local uri=userAdmin:PeZo15akg90kYC0g@some-name-rs0-2.some-name-rs0.users-18968 ++ run_mongo 'db.runCommand({ ping: 1 }).ok' userAdmin:PeZo15akg90kYC0g@some-name-rs0-2.some-name-rs0.users-18968 mongodb '' --quiet ++ local 'command=db.runCommand({ ping: 1 }).ok' ++ local 
uri=userAdmin:PeZo15akg90kYC0g@some-name-rs0-2.some-name-rs0.users-18968 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.nzlKbOdcJV ++++ mktemp +++ local LAST_ERR=/tmp/tmp.m2jBmC5Hn6 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.nzlKbOdcJV +++ cat /tmp/tmp.m2jBmC5Hn6 +++ rm /tmp/tmp.nzlKbOdcJV /tmp/tmp.m2jBmC5Hn6 +++ return 0 ++ local client_container=psmdb-client-667cfd9d66-rpkb4 ++ local mongo_flag=--quiet ++ [[ userAdmin:PeZo15akg90kYC0g@some-name-rs0-2.some-name-rs0.users-18968 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:PeZo15akg90kYC0g@some-name-rs0-2.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vYx4CTt78B +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZTzzks2Kow ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-667cfd9d66-rpkb4 -- bash -c 'printf '\''db.runCommand({ ping: 1 }).ok\n'\'' | mongo mongodb://userAdmin:PeZo15akg90kYC0g@some-name-rs0-2.some-name-rs0.users-18968.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.vYx4CTt78B ++ cat /tmp/tmp.ZTzzks2Kow ++ rm /tmp/tmp.vYx4CTt78B /tmp/tmp.ZTzzks2Kow ++ return 0 + ping=1 + desc 'ping return' + set +o xtrace ----------------------------------------------------------------------------------- ping return ----------------------------------------------------------------------------------- + '[' 1 '!=' 1 ']' + destroy users-18968 + local namespace=users-18968 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false ']' + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.vWMJCxVxJu ++ mktemp + local LAST_ERR=/tmp/tmp.PG6IeDwAtX + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.vWMJCxVxJu customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io 
"perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.PG6IeDwAtX + rm /tmp/tmp.vWMJCxVxJu /tmp/tmp.PG6IeDwAtX + return 0 ++ grep -v '\-\-\-' ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/deploy/crd.yaml + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.CdNI8wpDZc ++ mktemp + local LAST_ERR=/tmp/tmp.ASkQ1zLT1a + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.CdNI8wpDZc + cat /tmp/tmp.ASkQ1zLT1a + rm /tmp/tmp.CdNI8wpDZc /tmp/tmp.ASkQ1zLT1a + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.i9ww9LbyhJ ++ mktemp + local LAST_ERR=/tmp/tmp.txi5sOnVsy + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.i9ww9LbyhJ + cat /tmp/tmp.txi5sOnVsy + rm /tmp/tmp.i9ww9LbyhJ /tmp/tmp.txi5sOnVsy + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.qTaYuIc1DG ++ mktemp + local LAST_ERR=/tmp/tmp.ZcIshp84Vf + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd 
perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.qTaYuIc1DG + cat /tmp/tmp.ZcIshp84Vf + rm /tmp/tmp.qTaYuIc1DG /tmp/tmp.ZcIshp84Vf + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.8tB5dXFkD0 ++ mktemp + local LAST_ERR=/tmp/tmp.JTau9OKMrG + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1735/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.8tB5dXFkD0 clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.JTau9OKMrG + rm /tmp/tmp.8tB5dXFkD0 /tmp/tmp.JTau9OKMrG + return 0 + destroy_cert_manager + kubectl_bin delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.dwfHPBA4gO ++ mktemp + local LAST_ERR=/tmp/tmp.E1F9bBdNuO + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.dwfHPBA4gO + cat /tmp/tmp.E1F9bBdNuO Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when 
deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found 
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.dwfHPBA4gO + cat /tmp/tmp.E1F9bBdNuO Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io 
"orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found 
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": services "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": services "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": services "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": deployments.apps "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found
+ sleep 4
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 -a -n 1 ']'
+ cat /tmp/tmp.dwfHPBA4gO
+ cat /tmp/tmp.E1F9bBdNuO
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": namespaces "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": serviceaccounts "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": services "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": services "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": services "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": deployments.apps "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found
+ sleep 8
+ cat /tmp/tmp.dwfHPBA4gO
+ cat /tmp/tmp.E1F9bBdNuO
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": namespaces "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": serviceaccounts "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": services "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": services "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": services "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": deployments.apps "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.1/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found
+ rm /tmp/tmp.dwfHPBA4gO /tmp/tmp.E1F9bBdNuO
+ return 1
+ true
+ '[' -n '' ']'
+ '[' -n psmdb-operator ']'
+ kubectl_bin delete --grace-period=0 --force=true namespace users-18968
+ rm -rf /tmp/tmp.iitV44iXyv
+ kubectl_bin delete --grace-period=0 --force=true namespace psmdb-operator
++ mktemp
++ mktemp
+ desc 'test passed'
+ set +o xtrace
-----------------------------------------------------------------------------------
+ local LAST_OUT=/tmp/tmp.mQphamjAiA
test passed
-----------------------------------------------------------------------------------
++ mktemp
+ local LAST_OUT=/tmp/tmp.oN5bpXNT56
++ mktemp
+ local LAST_ERR=/tmp/tmp.B6DwHEb2Ag
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ local LAST_ERR=/tmp/tmp.teWXrkLVPh
+ local exit_status=0
+ local timeout=4
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace psmdb-operator
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace users-18968
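Note on the retry pattern visible throughout this trace: each kubectl_bin invocation redirects stdout and stderr into mktemp files, retries the command up to three times, and dumps the captured output after failed attempts, which is why the same block of cert-manager NotFound errors is printed once per attempt above. The snippet below is a minimal illustrative sketch of that pattern only, not the test suite's actual helper; the function name retry_kubectl and the sleep arithmetic (the log shows sleep 4 and then sleep 8) are assumptions for readability.

# Sketch only: an illustrative reconstruction of the retry wrapper seen in this trace.
# retry_kubectl and the backoff formula are assumed names/values, not the suite's real kubectl_bin.
retry_kubectl() {
    local LAST_OUT LAST_ERR exit_status i timeout=4
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ "$exit_status" -eq 0 ]; then
            break                       # success: stop retrying
        fi
        cat "$LAST_OUT" "$LAST_ERR"     # show what the failed attempt printed
        sleep $((timeout * (i + 1)))    # assumed backoff: 4s, then 8s, as seen in the log
    done
    cat "$LAST_OUT" "$LAST_ERR"         # final dump of the last attempt's output
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}

In this run the wrapper's final return 1 is tolerated by what looks like an '|| true' guard (the '+ true' right after '+ return 1'), since cert-manager was apparently never installed in this cluster and the NotFound errors are harmless cleanup noise; passing --ignore-not-found to the kubectl delete -f call, as the CRD/RBAC cleanup earlier in the log already does, would likely suppress them.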