Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/e2e-tests/logs/pvc-resize.log grep: warning: stray \ before - Warning: version difference between client (1.34) and server (1.30) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.30) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.30) exceeds the supported minor version skew of +/-1 + '[' 0 == 1 ']' + ensure_default_sc_allows_expansion + local default_sc ++ get_default_storageclass ++ kubectl_bin get sc -o 'jsonpath={.items[?(@.metadata.annotations.storageclass\.kubernetes\.io/is-default-class=="true")].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XxUgNBNyhy +++ mktemp ++ local LAST_ERR=/tmp/tmp.zSZNL3VepI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get sc -o 'jsonpath={.items[?(@.metadata.annotations.storageclass\.kubernetes\.io/is-default-class=="true")].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.XxUgNBNyhy ++ cat /tmp/tmp.zSZNL3VepI ++ rm /tmp/tmp.XxUgNBNyhy /tmp/tmp.zSZNL3VepI ++ return 0 + default_sc=standard-rwo + echo 'Checking if default storageclass standard-rwo allows volume expansion' Checking if default storageclass standard-rwo allows volume expansion + local allowVolumeExpansion ++ kubectl_bin get sc -o 'jsonpath={.items[?(@.metadata.name=="standard-rwo")].allowVolumeExpansion}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sTstkMTAzN +++ mktemp ++ local LAST_ERR=/tmp/tmp.GR1uhx0jjv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get sc -o 'jsonpath={.items[?(@.metadata.name=="standard-rwo")].allowVolumeExpansion}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.sTstkMTAzN ++ cat /tmp/tmp.GR1uhx0jjv ++ rm /tmp/tmp.sTstkMTAzN /tmp/tmp.GR1uhx0jjv ++ return 0 + allowVolumeExpansion=true + [[ true != \t\r\u\e ]] + create_infra pvc-resize-18358 + local ns=pvc-resize-18358 + [[ 1 == 1 ]] + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.IwOjgQwJe5 ++ mktemp + local LAST_ERR=/tmp/tmp.nxMltLsnsc + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.IwOjgQwJe5 customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.nxMltLsnsc + rm /tmp/tmp.IwOjgQwJe5 /tmp/tmp.nxMltLsnsc + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/deploy/crd.yaml ++ grep -v '\-\-\-' grep: warning: stray \ before - grep: warning: stray \ before - + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get 
perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.nf1mdmCdyt ++ mktemp + local LAST_ERR=/tmp/tmp.yr7tYcTJVn + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.nf1mdmCdyt + cat /tmp/tmp.yr7tYcTJVn + rm /tmp/tmp.nf1mdmCdyt /tmp/tmp.yr7tYcTJVn + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.o0wAZTQUWs ++ mktemp + local LAST_ERR=/tmp/tmp.PdypfLU2DL + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.o0wAZTQUWs + cat /tmp/tmp.PdypfLU2DL + rm /tmp/tmp.o0wAZTQUWs /tmp/tmp.PdypfLU2DL + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch perconaservermongodbs.psmdb.percona.com -n pvc-resize-18507 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaservermongodb.psmdb.percona.com/some-name patched + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.vYwAXUxfCs ++ mktemp + local LAST_ERR=/tmp/tmp.t0zuYWFdFt + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.vYwAXUxfCs customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com condition met + cat /tmp/tmp.t0zuYWFdFt + rm /tmp/tmp.vYwAXUxfCs /tmp/tmp.t0zuYWFdFt + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.67dPg4Nf2T ++ mktemp + local LAST_ERR=/tmp/tmp.yqaIc5eZRD + 
local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.67dPg4Nf2T clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.yqaIc5eZRD + rm /tmp/tmp.67dPg4Nf2T /tmp/tmp.yqaIc5eZRD + return 0 + check_crd_for_deletion PR-1987-d120637a + local git_tag=PR-1987-d120637a ++ curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/PR-1987-d120637a/deploy/crd.yaml ++ yq eval .metadata.name ++ /usr/sbin/sed s/---//g ++ /usr/sbin/sed ':a;N;$!ba;s/\n/ /g' + for crd_name in $(curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml | yq eval '.metadata.name' | $sed 's/---//g' | $sed ':a;N;$!ba;s/\n/ /g') ++ kubectl_bin get crd/null -o 'jsonpath={.status.conditions[-1].type}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vuFcWYDeCd +++ mktemp ++ local LAST_ERR=/tmp/tmp.qWHQl31Wjr ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.vuFcWYDeCd ++ cat /tmp/tmp.qWHQl31Wjr Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 0 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.vuFcWYDeCd ++ cat /tmp/tmp.qWHQl31Wjr Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 4 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.vuFcWYDeCd ++ cat /tmp/tmp.qWHQl31Wjr Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 8 ++ cat /tmp/tmp.vuFcWYDeCd ++ cat /tmp/tmp.qWHQl31Wjr Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ rm /tmp/tmp.vuFcWYDeCd /tmp/tmp.qWHQl31Wjr ++ return 1 + [[ '' == \T\e\r\m\i\n\a\t\i\n\g ]] + '[' -n psmdb-operator ']' + create_namespace psmdb-operator + local namespace=psmdb-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep validate-auth ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration 
error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace psmdb-operator --ignore-not-found ++ mktemp + xargs kubectl delete ns egrep: warning: egrep is obsolescent; using grep -E + local LAST_OUT=/tmp/tmp.edwFCzOiBc ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.FqUb4pIIlT ++ mktemp + local LAST_ERR=/tmp/tmp.EbAffzAMZA + local exit_status=0 + local timeout=4 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.nGaUDVqjFW + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get ns + for i in $(seq 0 2) + set +e + kubectl delete namespace psmdb-operator --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.edwFCzOiBc + cat /tmp/tmp.EbAffzAMZA + rm /tmp/tmp.edwFCzOiBc /tmp/tmp.EbAffzAMZA + return 0 namespace "pvc-resize-18507" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.FqUb4pIIlT namespace "psmdb-operator" deleted + cat /tmp/tmp.nGaUDVqjFW + rm /tmp/tmp.FqUb4pIIlT /tmp/tmp.nGaUDVqjFW + return 0 + kubectl_bin wait --for=delete namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.uqd25Jm0cM ++ mktemp + local LAST_ERR=/tmp/tmp.c9ub8mAHz3 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.uqd25Jm0cM + cat /tmp/tmp.c9ub8mAHz3 + rm /tmp/tmp.uqd25Jm0cM /tmp/tmp.c9ub8mAHz3 + return 0 + desc 'create namespace psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.xGgyxvoNj3 ++ mktemp + local LAST_ERR=/tmp/tmp.WKoj74oITr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.xGgyxvoNj3 namespace/psmdb-operator created + cat /tmp/tmp.WKoj74oITr + rm /tmp/tmp.xGgyxvoNj3 /tmp/tmp.WKoj74oITr + return 0 + set_kube_ctx psmdb-operator + local namespace=psmdb-operator ++ kubectl_bin config 
current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.CvxB88ac9k +++ mktemp ++ local LAST_ERR=/tmp/tmp.esPicC7Iqv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.CvxB88ac9k ++ cat /tmp/tmp.esPicC7Iqv ++ rm /tmp/tmp.CvxB88ac9k /tmp/tmp.esPicC7Iqv ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1987-d120637a-5-cluster6 --namespace=psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.ucInVscIbw ++ mktemp + local LAST_ERR=/tmp/tmp.MWfwNU5N3A + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1987-d120637a-5-cluster6 --namespace=psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ucInVscIbw Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1987-d120637a-5-cluster6" modified. + cat /tmp/tmp.MWfwNU5N3A + rm /tmp/tmp.ucInVscIbw /tmp/tmp.MWfwNU5N3A + return 0 + deploy_operator + desc 'start PSMDB operator' + set +o xtrace ----------------------------------------------------------------------------------- start PSMDB operator ----------------------------------------------------------------------------------- + local cr_file + '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/e2e-tests/pvc-resize/conf/crd.yaml ']' + cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/deploy/crd.yaml + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.fwTXvqRHYw ++ mktemp + local LAST_ERR=/tmp/tmp.uY2A0AO1p6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.fwTXvqRHYw customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied + cat /tmp/tmp.uY2A0AO1p6 + rm /tmp/tmp.fwTXvqRHYw /tmp/tmp.uY2A0AO1p6 + return 0 + '[' -n psmdb-operator ']' + apply_rbac cw-rbac + local operator_namespace=psmdb-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: psmdb-operator^' + kubectl_bin apply -n psmdb-operator -f - ++ mktemp + local LAST_OUT=/tmp/tmp.mPl1ca73lQ ++ mktemp + local LAST_ERR=/tmp/tmp.X9NiV1EDod + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.mPl1ca73lQ clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created + cat /tmp/tmp.X9NiV1EDod + rm /tmp/tmp.mPl1ca73lQ /tmp/tmp.X9NiV1EDod + return 0 + yq eval ' (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-1987-d120637a") | ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") | ((.. 
| select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/deploy/cw-operator.yaml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.4yhRnbJNcC ++ mktemp + local LAST_ERR=/tmp/tmp.QNIhZI0SYb + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.4yhRnbJNcC deployment.apps/percona-server-mongodb-operator created + cat /tmp/tmp.QNIhZI0SYb + rm /tmp/tmp.4yhRnbJNcC /tmp/tmp.QNIhZI0SYb + return 0 + sleep 2 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.HR3BxKhES2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.BfDO8RdkXZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.HR3BxKhES2 ++ cat /tmp/tmp.BfDO8RdkXZ ++ rm /tmp/tmp.HR3BxKhES2 /tmp/tmp.BfDO8RdkXZ ++ return 0 + wait_pod percona-server-mongodb-operator-85f675667c-9f6h2 + local pod=percona-server-mongodb-operator-85f675667c-9f6h2 + set +o xtrace waiting for pod/percona-server-mongodb-operator-85f675667c-9f6h2 to be ready..OK + echo 'Print operator info from log' Print operator info from log + grep 'Manager starting up' ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.pR39zIoe0M +++ mktemp ++ local LAST_ERR=/tmp/tmp.fknCUtl0b5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.pR39zIoe0M ++ cat /tmp/tmp.fknCUtl0b5 ++ rm /tmp/tmp.pR39zIoe0M /tmp/tmp.fknCUtl0b5 ++ return 0 + kubectl_bin logs percona-server-mongodb-operator-85f675667c-9f6h2 ++ mktemp + local LAST_OUT=/tmp/tmp.Xk928QeTiw ++ mktemp + local LAST_ERR=/tmp/tmp.Lq85Thsf5o + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl logs percona-server-mongodb-operator-85f675667c-9f6h2 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Xk928QeTiw + cat /tmp/tmp.Lq85Thsf5o + rm /tmp/tmp.Xk928QeTiw /tmp/tmp.Lq85Thsf5o + return 0 2025-09-22T15:28:49.140Z INFO setup Manager starting up {"gitCommit": "d120637a46cfaf89d946ade2d9faedf115796e14", "gitBranch": "PR-1987-d120637a", "buildTime": "", "goVersion": "go1.25.1", "os": "linux", "arch": "amd64"} + create_namespace pvc-resize-18358 + local namespace=pvc-resize-18358 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) 
were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pvc-resize-18358' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pvc-resize-18358 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pvc-resize-18358 --ignore-not-found ++ mktemp + xargs kubectl delete ns egrep: warning: egrep is obsolescent; using grep -E ++ mktemp + local LAST_OUT=/tmp/tmp.22ah9Drzai ++ mktemp + local LAST_OUT=/tmp/tmp.oVh89L1zP6 + local LAST_ERR=/tmp/tmp.wRh3VIkXr0 + local exit_status=0 + local timeout=4 ++ seq 0 2 ++ mktemp + for i in $(seq 0 2) + set +e + kubectl get ns + local LAST_ERR=/tmp/tmp.xGgYcJLPhO + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete namespace pvc-resize-18358 --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.22ah9Drzai + cat /tmp/tmp.wRh3VIkXr0 + rm /tmp/tmp.22ah9Drzai /tmp/tmp.wRh3VIkXr0 + return 0 error: resource(s) were provided, but no name was specified + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.oVh89L1zP6 + cat /tmp/tmp.xGgYcJLPhO + rm /tmp/tmp.oVh89L1zP6 /tmp/tmp.xGgYcJLPhO + return 0 + kubectl_bin wait --for=delete namespace pvc-resize-18358 ++ mktemp + local LAST_OUT=/tmp/tmp.OQ0At1Vgv3 ++ mktemp + local LAST_ERR=/tmp/tmp.gr9rKkiBZE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete namespace pvc-resize-18358 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.OQ0At1Vgv3 + cat /tmp/tmp.gr9rKkiBZE + rm /tmp/tmp.OQ0At1Vgv3 /tmp/tmp.gr9rKkiBZE + return 0 + desc 'create namespace pvc-resize-18358' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pvc-resize-18358 ----------------------------------------------------------------------------------- + kubectl_bin create namespace pvc-resize-18358 ++ mktemp + local LAST_OUT=/tmp/tmp.p2cubqCsvf ++ mktemp + local LAST_ERR=/tmp/tmp.dfppdm1N2S + local 
exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace pvc-resize-18358 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.p2cubqCsvf namespace/pvc-resize-18358 created + cat /tmp/tmp.dfppdm1N2S + rm /tmp/tmp.p2cubqCsvf /tmp/tmp.dfppdm1N2S + return 0 + set_kube_ctx pvc-resize-18358 + local namespace=pvc-resize-18358 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.oC8hnTZ5yf +++ mktemp ++ local LAST_ERR=/tmp/tmp.LguvqbHYEZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.oC8hnTZ5yf ++ cat /tmp/tmp.LguvqbHYEZ ++ rm /tmp/tmp.oC8hnTZ5yf /tmp/tmp.LguvqbHYEZ ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1987-d120637a-5-cluster6 --namespace=pvc-resize-18358 ++ mktemp + local LAST_OUT=/tmp/tmp.pzzd8nuM4X ++ mktemp + local LAST_ERR=/tmp/tmp.iTNNWz6uRY + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1987-d120637a-5-cluster6 --namespace=pvc-resize-18358 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.pzzd8nuM4X Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1987-d120637a-5-cluster6" modified. + cat /tmp/tmp.iTNNWz6uRY + rm /tmp/tmp.pzzd8nuM4X /tmp/tmp.iTNNWz6uRY + return 0 + desc 'create secrets and psmdb client' + set +o xtrace ----------------------------------------------------------------------------------- create secrets and psmdb client ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/e2e-tests/conf/client.yml ++ mktemp + local LAST_OUT=/tmp/tmp.Cyge8SXguw ++ mktemp + local LAST_ERR=/tmp/tmp.5GOodOUlG6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/e2e-tests/conf/client.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Cyge8SXguw secret/some-users created deployment.apps/psmdb-client created + cat /tmp/tmp.5GOodOUlG6 + rm /tmp/tmp.Cyge8SXguw /tmp/tmp.5GOodOUlG6 + return 0 + desc 'create PSMDB cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PSMDB cluster ----------------------------------------------------------------------------------- + cluster=some-name + '[' 0 == 1 ']' + spinup_psmdb some-name-rs0 /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/e2e-tests/pvc-resize/conf/some-name.yml + local cluster=some-name-rs0 + local config=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/e2e-tests/pvc-resize/conf/some-name.yml + local size=3 + desc 'create first PSMDB cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PSMDB cluster ----------------------------------------------------------------------------------- + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/e2e-tests/pvc-resize/conf/some-name.yml + '[' -z '' ']' + cat_config 
/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/e2e-tests/pvc-resize/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/e2e-tests/pvc-resize/conf/some-name.yml + yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod7.0"' ++ mktemp + yq eval '(.spec | select(has("pmm"))).pmm.image = "percona/pmm-client:2.44.1-1"' + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-1987-d120637a"' + local LAST_OUT=/tmp/tmp.AUHqlMSYvI ++ mktemp + yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' + local LAST_ERR=/tmp/tmp.diAeA7HvzT + local exit_status=0 + local timeout=4 + yq eval '.spec.upgradeOptions.apply="Never"' ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.AUHqlMSYvI perconaservermongodb.psmdb.percona.com/some-name created + cat /tmp/tmp.diAeA7HvzT + rm /tmp/tmp.AUHqlMSYvI /tmp/tmp.diAeA7HvzT + return 0 + desc 'check if Pod is started' + set +o xtrace ----------------------------------------------------------------------------------- check if Pod is started ----------------------------------------------------------------------------------- + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready..................OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.............OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bPUarTO1vj +++ mktemp ++ local LAST_ERR=/tmp/tmp.wHUoL0nICX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bPUarTO1vj ++ cat /tmp/tmp.wHUoL0nICX ++ rm /tmp/tmp.bPUarTO1vj /tmp/tmp.wHUoL0nICX ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready...........OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PoH5S2mW6f +++ mktemp ++ local LAST_ERR=/tmp/tmp.ApfVIFH48g ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.PoH5S2mW6f ++ cat /tmp/tmp.ApfVIFH48g ++ rm /tmp/tmp.PoH5S2mW6f /tmp/tmp.ApfVIFH48g ++ return 0 + [[ '' == \t\r\u\e ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aWtLipiJ7G +++ mktemp ++ local LAST_ERR=/tmp/tmp.7S9Ri3v5y2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ 
set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.aWtLipiJ7G ++ cat /tmp/tmp.7S9Ri3v5y2 ++ rm /tmp/tmp.aWtLipiJ7G /tmp/tmp.7S9Ri3v5y2 ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + sleep 20 + compare_kubectl statefulset/some-name-rs0 + local resource=statefulset/some-name-rs0 + local postfix= + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/e2e-tests/pvc-resize/compare/statefulset_some-name-rs0.yml + local new_result=/tmp/tmp.Q9ZZgBE25d/statefulset_some-name-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/e2e-tests/pvc-resize/compare/statefulset_some-name-rs0-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-rs0 + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("pvc-resize-18358", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.lx5bDV9dmD ++ mktemp + local LAST_ERR=/tmp/tmp.FguY5xVeWe + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/some-name-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.lx5bDV9dmD + cat /tmp/tmp.FguY5xVeWe + rm /tmp/tmp.lx5bDV9dmD /tmp/tmp.FguY5xVeWe + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.Q9ZZgBE25d/statefulset_some-name-rs0.yml + version_gt 1.22 ++ echo '1.30 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.Q9ZZgBE25d/statefulset_some-name-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.Q9ZZgBE25d/statefulset_some-name-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/e2e-tests/pvc-resize/compare/statefulset_some-name-rs0.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/e2e-tests/pvc-resize/compare/statefulset_some-name-rs0.yml /tmp/tmp.Q9ZZgBE25d/statefulset_some-name-rs0.yml + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + run_mongo 'db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})' userAdmin:userAdmin123456@some-name-rs0.pvc-resize-18358 + local 'command=db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})' + local uri=userAdmin:userAdmin123456@some-name-rs0.pvc-resize-18358 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bM1WKqlkSF +++ mktemp ++ local LAST_ERR=/tmp/tmp.fTdFYDc5bW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bM1WKqlkSF ++ cat /tmp/tmp.fTdFYDc5bW ++ rm /tmp/tmp.bM1WKqlkSF /tmp/tmp.fTdFYDc5bW ++ return 0 + local client_container=psmdb-client-66f577db5f-rj4b7 + local mongo_flag= + [[ userAdmin:userAdmin123456@some-name-rs0.pvc-resize-18358 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-rj4b7 -- bash -c 'printf '\''db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.pvc-resize-18358.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.LsaXPvcpsH ++ mktemp + local LAST_ERR=/tmp/tmp.j9jrINjwM6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-66f577db5f-rj4b7 -- bash -c 'printf '\''db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.pvc-resize-18358.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LsaXPvcpsH Percona Server for MongoDB shell version v4.4.29-28 connecting to: 
mongodb://some-name-rs0-0.some-name-rs0.pvc-resize-18358.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pvc-resize-18358.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.pvc-resize-18358.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("f49ade02-9da1-472e-ab24-d86487c5084b") } Percona Server for MongoDB server version: v7.0.24-13 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] } bye + cat /tmp/tmp.j9jrINjwM6 + rm /tmp/tmp.LsaXPvcpsH /tmp/tmp.j9jrINjwM6 + return 0 + run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@some-name-rs0.pvc-resize-18358 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@some-name-rs0.pvc-resize-18358 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GvI58ezzWA +++ mktemp ++ local LAST_ERR=/tmp/tmp.gwIlydLzC0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.GvI58ezzWA ++ cat /tmp/tmp.gwIlydLzC0 ++ rm /tmp/tmp.GvI58ezzWA /tmp/tmp.gwIlydLzC0 ++ return 0 + local client_container=psmdb-client-66f577db5f-rj4b7 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.pvc-resize-18358 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-rj4b7 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pvc-resize-18358.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.TLhL3MWC8k ++ mktemp + local LAST_ERR=/tmp/tmp.8YbQFW0Fqf + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-66f577db5f-rj4b7 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pvc-resize-18358.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.TLhL3MWC8k Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.pvc-resize-18358.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pvc-resize-18358.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.pvc-resize-18358.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("e8f33f42-8d50-4573-8ad0-c5abf255d6d3") } Percona Server for MongoDB server version: v7.0.24-13 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.8YbQFW0Fqf + rm /tmp/tmp.TLhL3MWC8k /tmp/tmp.8YbQFW0Fqf + return 0 + patch_pvc_request some-name 2G + local cluster=some-name + local size=2G + echo 'Patching PVC request to 2G in some-name' Patching PVC request to 2G in some-name + kubectl_bin patch psmdb some-name --type=json '-p=[{"op": "replace", "path": "/spec/replsets/0/volumeSpec/persistentVolumeClaim/resources/requests/storage", "value":"2G"}]' ++ mktemp + local LAST_OUT=/tmp/tmp.IMbvq2aV17 ++ mktemp + local LAST_ERR=/tmp/tmp.uUrMXQVfHM + local exit_status=0 + local timeout=4 ++ seq 0 2 
+ for i in $(seq 0 2) + set +e + kubectl patch psmdb some-name --type=json '-p=[{"op": "replace", "path": "/spec/replsets/0/volumeSpec/persistentVolumeClaim/resources/requests/storage", "value":"2G"}]' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.IMbvq2aV17 perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.uUrMXQVfHM + rm /tmp/tmp.IMbvq2aV17 /tmp/tmp.uUrMXQVfHM + return 0 + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NgT9YNSTqn +++ mktemp ++ local LAST_ERR=/tmp/tmp.gFkrkIofRc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.NgT9YNSTqn ++ cat /tmp/tmp.gFkrkIofRc ++ rm /tmp/tmp.NgT9YNSTqn /tmp/tmp.gFkrkIofRc ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + echo + wait_all_pvc_resize 2Gi 120 1 + local expected_size=2Gi + local max_retry=120 + local sleep_time=1 ++ kubectl_bin get pvc -l app.kubernetes.io/component=mongod -o name +++ mktemp ++ local LAST_OUT=/tmp/tmp.opdlYPNDil +++ mktemp ++ local LAST_ERR=/tmp/tmp.knkF5Fe1eA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pvc -l app.kubernetes.io/component=mongod -o name ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.opdlYPNDil ++ cat /tmp/tmp.knkF5Fe1eA ++ rm /tmp/tmp.opdlYPNDil /tmp/tmp.knkF5Fe1eA ++ return 0 + for pvc in $(kubectl_bin get pvc -l app.kubernetes.io/component=mongod -o name) + wait_pvc_resize persistentvolumeclaim/mongod-data-some-name-rs0-0 2Gi 120 1 + local pvc=persistentvolumeclaim/mongod-data-some-name-rs0-0 + local expected_size=2Gi + local max_retry=120 + local sleep_time=1 + local retry=0 + echo 'Waiting for persistentvolumeclaim/mongod-data-some-name-rs0-0 to be resized' Waiting for persistentvolumeclaim/mongod-data-some-name-rs0-0 to be resized ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GcvOY99QtL +++ mktemp ++ local LAST_ERR=/tmp/tmp.TEIDbWlTIW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.GcvOY99QtL ++ cat /tmp/tmp.TEIDbWlTIW ++ rm /tmp/tmp.GcvOY99QtL /tmp/tmp.TEIDbWlTIW ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 0 -ge 120 ]] + echo -n . .+ sleep 1 + retry=1 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TSsqt8Am55 +++ mktemp ++ local LAST_ERR=/tmp/tmp.athBkjT8Ex ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.TSsqt8Am55 ++ cat /tmp/tmp.athBkjT8Ex ++ rm /tmp/tmp.TSsqt8Am55 /tmp/tmp.athBkjT8Ex ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 1 -ge 120 ]] + echo -n . 
.+ sleep 1 + retry=2 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TeqOPCg8lP +++ mktemp ++ local LAST_ERR=/tmp/tmp.sxzVIURPhT ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.TeqOPCg8lP ++ cat /tmp/tmp.sxzVIURPhT ++ rm /tmp/tmp.TeqOPCg8lP /tmp/tmp.sxzVIURPhT ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 2 -ge 120 ]] + echo -n . .+ sleep 1 + retry=3 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yLNdzhs66y +++ mktemp ++ local LAST_ERR=/tmp/tmp.MGvTkUXPV9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.yLNdzhs66y ++ cat /tmp/tmp.MGvTkUXPV9 ++ rm /tmp/tmp.yLNdzhs66y /tmp/tmp.MGvTkUXPV9 ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 3 -ge 120 ]] + echo -n . .+ sleep 1 + retry=4 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.x9igONjUAI +++ mktemp ++ local LAST_ERR=/tmp/tmp.wBGUmP3Hb5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.x9igONjUAI ++ cat /tmp/tmp.wBGUmP3Hb5 ++ rm /tmp/tmp.x9igONjUAI /tmp/tmp.wBGUmP3Hb5 ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 4 -ge 120 ]] + echo -n . .+ sleep 1 + retry=5 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TNKVaiFlUh +++ mktemp ++ local LAST_ERR=/tmp/tmp.4adnf5Iu0o ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.TNKVaiFlUh ++ cat /tmp/tmp.4adnf5Iu0o ++ rm /tmp/tmp.TNKVaiFlUh /tmp/tmp.4adnf5Iu0o ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 5 -ge 120 ]] + echo -n . .+ sleep 1 + retry=6 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jIqXLVR5YN +++ mktemp ++ local LAST_ERR=/tmp/tmp.z841hJ1xGM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.jIqXLVR5YN ++ cat /tmp/tmp.z841hJ1xGM ++ rm /tmp/tmp.jIqXLVR5YN /tmp/tmp.z841hJ1xGM ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 6 -ge 120 ]] + echo -n . 
.+ sleep 1 + retry=7 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.d9k5yV5z78 +++ mktemp ++ local LAST_ERR=/tmp/tmp.4sgFecUZmP ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.d9k5yV5z78 ++ cat /tmp/tmp.4sgFecUZmP ++ rm /tmp/tmp.d9k5yV5z78 /tmp/tmp.4sgFecUZmP ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 7 -ge 120 ]] + echo -n . .+ sleep 1 + retry=8 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gtmKWTqo41 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZIuedEVJa0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.gtmKWTqo41 ++ cat /tmp/tmp.ZIuedEVJa0 ++ rm /tmp/tmp.gtmKWTqo41 /tmp/tmp.ZIuedEVJa0 ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 8 -ge 120 ]] + echo -n . .+ sleep 1 + retry=9 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.g8dBhV0ciK +++ mktemp ++ local LAST_ERR=/tmp/tmp.uphRVUNdSm ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.g8dBhV0ciK ++ cat /tmp/tmp.uphRVUNdSm ++ rm /tmp/tmp.g8dBhV0ciK /tmp/tmp.uphRVUNdSm ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 9 -ge 120 ]] + echo -n . .+ sleep 1 + retry=10 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8MsqJyVd1Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.UwQqxv8lxT ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8MsqJyVd1Y ++ cat /tmp/tmp.UwQqxv8lxT ++ rm /tmp/tmp.8MsqJyVd1Y /tmp/tmp.UwQqxv8lxT ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 10 -ge 120 ]] + echo -n . .+ sleep 1 + retry=11 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nUNmZioVrd +++ mktemp ++ local LAST_ERR=/tmp/tmp.VVS116U3oL ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.nUNmZioVrd ++ cat /tmp/tmp.VVS116U3oL ++ rm /tmp/tmp.nUNmZioVrd /tmp/tmp.VVS116U3oL ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 11 -ge 120 ]] + echo -n . 
.+ sleep 1 + retry=12 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TvDIBij8r8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.GTOTJCzCmx ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.TvDIBij8r8 ++ cat /tmp/tmp.GTOTJCzCmx ++ rm /tmp/tmp.TvDIBij8r8 /tmp/tmp.GTOTJCzCmx ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 12 -ge 120 ]] + echo -n . .+ sleep 1 + retry=13 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hTkW8avlb3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.D6p4DkZqfQ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hTkW8avlb3 ++ cat /tmp/tmp.D6p4DkZqfQ ++ rm /tmp/tmp.hTkW8avlb3 /tmp/tmp.D6p4DkZqfQ ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 13 -ge 120 ]] + echo -n . .+ sleep 1 + retry=14 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JIQPrPI47T +++ mktemp ++ local LAST_ERR=/tmp/tmp.DYP1ops7bU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JIQPrPI47T ++ cat /tmp/tmp.DYP1ops7bU ++ rm /tmp/tmp.JIQPrPI47T /tmp/tmp.DYP1ops7bU ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 14 -ge 120 ]] + echo -n . .+ sleep 1 + retry=15 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UODRh7guAu +++ mktemp ++ local LAST_ERR=/tmp/tmp.DMdu6vKn3U ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.UODRh7guAu ++ cat /tmp/tmp.DMdu6vKn3U ++ rm /tmp/tmp.UODRh7guAu /tmp/tmp.DMdu6vKn3U ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 15 -ge 120 ]] + echo -n . .+ sleep 1 + retry=16 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iG3BR4zf9V +++ mktemp ++ local LAST_ERR=/tmp/tmp.AmVJLrIaoA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.iG3BR4zf9V ++ cat /tmp/tmp.AmVJLrIaoA ++ rm /tmp/tmp.iG3BR4zf9V /tmp/tmp.AmVJLrIaoA ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 16 -ge 120 ]] + echo -n . 
.+ sleep 1 + retry=17 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.H1lAfV3mEL +++ mktemp ++ local LAST_ERR=/tmp/tmp.CHPlAofrif ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.H1lAfV3mEL ++ cat /tmp/tmp.CHPlAofrif ++ rm /tmp/tmp.H1lAfV3mEL /tmp/tmp.CHPlAofrif ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 17 -ge 120 ]] + echo -n . .+ sleep 1 + retry=18 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YFjJJSnFVP +++ mktemp ++ local LAST_ERR=/tmp/tmp.pc1EQEpUnK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.YFjJJSnFVP ++ cat /tmp/tmp.pc1EQEpUnK ++ rm /tmp/tmp.YFjJJSnFVP /tmp/tmp.pc1EQEpUnK ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 18 -ge 120 ]] + echo -n . .+ sleep 1 + retry=19 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cgYpWe6c4G +++ mktemp ++ local LAST_ERR=/tmp/tmp.aEaZA8HwaE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.cgYpWe6c4G ++ cat /tmp/tmp.aEaZA8HwaE ++ rm /tmp/tmp.cgYpWe6c4G /tmp/tmp.aEaZA8HwaE ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 19 -ge 120 ]] + echo -n . .+ sleep 1 + retry=20 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UYYOqzloPT +++ mktemp ++ local LAST_ERR=/tmp/tmp.xeJ9BYz9UN ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.UYYOqzloPT ++ cat /tmp/tmp.xeJ9BYz9UN ++ rm /tmp/tmp.UYYOqzloPT /tmp/tmp.xeJ9BYz9UN ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 20 -ge 120 ]] + echo -n . .+ sleep 1 + retry=21 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gm6BOYYldm +++ mktemp ++ local LAST_ERR=/tmp/tmp.MjvkG54Kyy ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.gm6BOYYldm ++ cat /tmp/tmp.MjvkG54Kyy ++ rm /tmp/tmp.gm6BOYYldm /tmp/tmp.MjvkG54Kyy ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 21 -ge 120 ]] + echo -n . 
.+ sleep 1 + retry=22 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.94ghAlRcoQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.iqMsen3jWc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.94ghAlRcoQ ++ cat /tmp/tmp.iqMsen3jWc ++ rm /tmp/tmp.94ghAlRcoQ /tmp/tmp.iqMsen3jWc ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 22 -ge 120 ]] + echo -n . .+ sleep 1 + retry=23 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.69zU4dCCMT +++ mktemp ++ local LAST_ERR=/tmp/tmp.qbi939qaS5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.69zU4dCCMT ++ cat /tmp/tmp.qbi939qaS5 ++ rm /tmp/tmp.69zU4dCCMT /tmp/tmp.qbi939qaS5 ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 23 -ge 120 ]] + echo -n . .+ sleep 1 + retry=24 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yckNd0kJ8c +++ mktemp ++ local LAST_ERR=/tmp/tmp.hWQ8DgGXkg ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.yckNd0kJ8c ++ cat /tmp/tmp.hWQ8DgGXkg ++ rm /tmp/tmp.yckNd0kJ8c /tmp/tmp.hWQ8DgGXkg ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 24 -ge 120 ]] + echo -n . .+ sleep 1 + retry=25 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pwqgJBnxrp +++ mktemp ++ local LAST_ERR=/tmp/tmp.9Sh8vi3TI9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.pwqgJBnxrp ++ cat /tmp/tmp.9Sh8vi3TI9 ++ rm /tmp/tmp.pwqgJBnxrp /tmp/tmp.9Sh8vi3TI9 ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 25 -ge 120 ]] + echo -n . .+ sleep 1 + retry=26 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.x5CyjEAizb +++ mktemp ++ local LAST_ERR=/tmp/tmp.SNKdClws2s ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.x5CyjEAizb ++ cat /tmp/tmp.SNKdClws2s ++ rm /tmp/tmp.x5CyjEAizb /tmp/tmp.SNKdClws2s ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 26 -ge 120 ]] + echo -n . 
.+ sleep 1 + retry=27 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fq7DRYVqvo +++ mktemp ++ local LAST_ERR=/tmp/tmp.rY0UT7ZA9h ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.fq7DRYVqvo ++ cat /tmp/tmp.rY0UT7ZA9h ++ rm /tmp/tmp.fq7DRYVqvo /tmp/tmp.rY0UT7ZA9h ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 27 -ge 120 ]] + echo -n . .+ sleep 1 + retry=28 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4qY3tu3GrI +++ mktemp ++ local LAST_ERR=/tmp/tmp.CKqtQflerY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.4qY3tu3GrI ++ cat /tmp/tmp.CKqtQflerY ++ rm /tmp/tmp.4qY3tu3GrI /tmp/tmp.CKqtQflerY ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 28 -ge 120 ]] + echo -n . .+ sleep 1 + retry=29 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EXceZ7Dask +++ mktemp ++ local LAST_ERR=/tmp/tmp.WOy1TWMpKy ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.EXceZ7Dask ++ cat /tmp/tmp.WOy1TWMpKy ++ rm /tmp/tmp.EXceZ7Dask /tmp/tmp.WOy1TWMpKy ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 29 -ge 120 ]] + echo -n . .+ sleep 1 + retry=30 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3SqM1kegw4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.hBHu1FG3rd ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3SqM1kegw4 ++ cat /tmp/tmp.hBHu1FG3rd ++ rm /tmp/tmp.3SqM1kegw4 /tmp/tmp.hBHu1FG3rd ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 30 -ge 120 ]] + echo -n . .+ sleep 1 + retry=31 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wUSd2BwFE9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.aoS79XxvYA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.wUSd2BwFE9 ++ cat /tmp/tmp.aoS79XxvYA ++ rm /tmp/tmp.wUSd2BwFE9 /tmp/tmp.aoS79XxvYA ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 31 -ge 120 ]] + echo -n . 
.+ sleep 1 + retry=32 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yE8FeeF79B +++ mktemp ++ local LAST_ERR=/tmp/tmp.TTKtA5wBcQ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.yE8FeeF79B ++ cat /tmp/tmp.TTKtA5wBcQ ++ rm /tmp/tmp.yE8FeeF79B /tmp/tmp.TTKtA5wBcQ ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 32 -ge 120 ]] + echo -n . .+ sleep 1 + retry=33 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8LiarUXX30 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Lz9MZCfwwg ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8LiarUXX30 ++ cat /tmp/tmp.Lz9MZCfwwg ++ rm /tmp/tmp.8LiarUXX30 /tmp/tmp.Lz9MZCfwwg ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 33 -ge 120 ]] + echo -n . .+ sleep 1 + retry=34 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WWU1fhyjAn +++ mktemp ++ local LAST_ERR=/tmp/tmp.a5nvXBnFuX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.WWU1fhyjAn ++ cat /tmp/tmp.a5nvXBnFuX ++ rm /tmp/tmp.WWU1fhyjAn /tmp/tmp.a5nvXBnFuX ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 34 -ge 120 ]] + echo -n . .+ sleep 1 + retry=35 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.K61M7nEl0P +++ mktemp ++ local LAST_ERR=/tmp/tmp.hqWK0sTNko ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.K61M7nEl0P ++ cat /tmp/tmp.hqWK0sTNko ++ rm /tmp/tmp.K61M7nEl0P /tmp/tmp.hqWK0sTNko ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 35 -ge 120 ]] + echo -n . .+ sleep 1 + retry=36 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.G5sJGTSV91 +++ mktemp ++ local LAST_ERR=/tmp/tmp.meNZ1tg4nj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.G5sJGTSV91 ++ cat /tmp/tmp.meNZ1tg4nj ++ rm /tmp/tmp.G5sJGTSV91 /tmp/tmp.meNZ1tg4nj ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 36 -ge 120 ]] + echo -n . 
.+ sleep 1 + retry=37 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dg8GD9hQ8a +++ mktemp ++ local LAST_ERR=/tmp/tmp.9MT6SRQjoP ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dg8GD9hQ8a ++ cat /tmp/tmp.9MT6SRQjoP ++ rm /tmp/tmp.dg8GD9hQ8a /tmp/tmp.9MT6SRQjoP ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 37 -ge 120 ]] + echo -n . .+ sleep 1 + retry=38 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gaaTMupPRQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.XH7xw9aYzj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.gaaTMupPRQ ++ cat /tmp/tmp.XH7xw9aYzj ++ rm /tmp/tmp.gaaTMupPRQ /tmp/tmp.XH7xw9aYzj ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 38 -ge 120 ]] + echo -n . .+ sleep 1 + retry=39 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dNlceLk62w +++ mktemp ++ local LAST_ERR=/tmp/tmp.ByQPD4ynNN ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dNlceLk62w ++ cat /tmp/tmp.ByQPD4ynNN ++ rm /tmp/tmp.dNlceLk62w /tmp/tmp.ByQPD4ynNN ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 39 -ge 120 ]] + echo -n . .+ sleep 1 + retry=40 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Jue6XugvEV +++ mktemp ++ local LAST_ERR=/tmp/tmp.KlUuePbosG ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Jue6XugvEV ++ cat /tmp/tmp.KlUuePbosG ++ rm /tmp/tmp.Jue6XugvEV /tmp/tmp.KlUuePbosG ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 40 -ge 120 ]] + echo -n . .+ sleep 1 + retry=41 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.499oWi9Pba +++ mktemp ++ local LAST_ERR=/tmp/tmp.leGM9U9qBt ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.499oWi9Pba ++ cat /tmp/tmp.leGM9U9qBt ++ rm /tmp/tmp.499oWi9Pba /tmp/tmp.leGM9U9qBt ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 41 -ge 120 ]] + echo -n . 
.+ sleep 1 + retry=42 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e34NNMwgXK +++ mktemp ++ local LAST_ERR=/tmp/tmp.nVy6KEiHj0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.e34NNMwgXK ++ cat /tmp/tmp.nVy6KEiHj0 ++ rm /tmp/tmp.e34NNMwgXK /tmp/tmp.nVy6KEiHj0 ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 42 -ge 120 ]] + echo -n . .+ sleep 1 + retry=43 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4i56aRxq0F +++ mktemp ++ local LAST_ERR=/tmp/tmp.UWVaJWA19f ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.4i56aRxq0F ++ cat /tmp/tmp.UWVaJWA19f ++ rm /tmp/tmp.4i56aRxq0F /tmp/tmp.UWVaJWA19f ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 43 -ge 120 ]] + echo -n . .+ sleep 1 + retry=44 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.m1o6bpHWXX +++ mktemp ++ local LAST_ERR=/tmp/tmp.O5rrbKzKt1 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.m1o6bpHWXX ++ cat /tmp/tmp.O5rrbKzKt1 ++ rm /tmp/tmp.m1o6bpHWXX /tmp/tmp.O5rrbKzKt1 ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 44 -ge 120 ]] + echo -n . .+ sleep 1 + retry=45 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bDbx7s3I4c +++ mktemp ++ local LAST_ERR=/tmp/tmp.57FHQi4rYj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bDbx7s3I4c ++ cat /tmp/tmp.57FHQi4rYj ++ rm /tmp/tmp.bDbx7s3I4c /tmp/tmp.57FHQi4rYj ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 45 -ge 120 ]] + echo -n . .+ sleep 1 + retry=46 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gYZtPHNbl1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZSQwiIsyTE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.gYZtPHNbl1 ++ cat /tmp/tmp.ZSQwiIsyTE ++ rm /tmp/tmp.gYZtPHNbl1 /tmp/tmp.ZSQwiIsyTE ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 46 -ge 120 ]] + echo -n . 
.+ sleep 1 + retry=47 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xCHYwErxFQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.zQcwuYBCMQ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xCHYwErxFQ ++ cat /tmp/tmp.zQcwuYBCMQ ++ rm /tmp/tmp.xCHYwErxFQ /tmp/tmp.zQcwuYBCMQ ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 47 -ge 120 ]] + echo -n . .+ sleep 1 + retry=48 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.H4TmGeSb1b +++ mktemp ++ local LAST_ERR=/tmp/tmp.bd3hHuKUMC ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.H4TmGeSb1b ++ cat /tmp/tmp.bd3hHuKUMC ++ rm /tmp/tmp.H4TmGeSb1b /tmp/tmp.bd3hHuKUMC ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 48 -ge 120 ]] + echo -n . .+ sleep 1 + retry=49 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LHuZZtXGpj +++ mktemp ++ local LAST_ERR=/tmp/tmp.tGZ4DKeqdy ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LHuZZtXGpj ++ cat /tmp/tmp.tGZ4DKeqdy ++ rm /tmp/tmp.LHuZZtXGpj /tmp/tmp.tGZ4DKeqdy ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 49 -ge 120 ]] + echo -n . .+ sleep 1 + retry=50 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.A574MzMXpX +++ mktemp ++ local LAST_ERR=/tmp/tmp.z0YbnEcezJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.A574MzMXpX ++ cat /tmp/tmp.z0YbnEcezJ ++ rm /tmp/tmp.A574MzMXpX /tmp/tmp.z0YbnEcezJ ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 50 -ge 120 ]] + echo -n . .+ sleep 1 + retry=51 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XLMBfv8FBQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.5niubwbdyK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.XLMBfv8FBQ ++ cat /tmp/tmp.5niubwbdyK ++ rm /tmp/tmp.XLMBfv8FBQ /tmp/tmp.5niubwbdyK ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 51 -ge 120 ]] + echo -n . 
.+ sleep 1 + retry=52 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mcB96dkgIB +++ mktemp ++ local LAST_ERR=/tmp/tmp.ITejb7PO6o ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.mcB96dkgIB ++ cat /tmp/tmp.ITejb7PO6o ++ rm /tmp/tmp.mcB96dkgIB /tmp/tmp.ITejb7PO6o ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 52 -ge 120 ]] + echo -n . .+ sleep 1 + retry=53 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MOKrWbevaZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.jafuLC6fgm ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.MOKrWbevaZ ++ cat /tmp/tmp.jafuLC6fgm ++ rm /tmp/tmp.MOKrWbevaZ /tmp/tmp.jafuLC6fgm ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 53 -ge 120 ]] + echo -n . .+ sleep 1 + retry=54 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HuOhcNS5zn +++ mktemp ++ local LAST_ERR=/tmp/tmp.obeU0tl94V ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.HuOhcNS5zn ++ cat /tmp/tmp.obeU0tl94V ++ rm /tmp/tmp.HuOhcNS5zn /tmp/tmp.obeU0tl94V ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 54 -ge 120 ]] + echo -n . .+ sleep 1 + retry=55 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mNGHpf78Dm +++ mktemp ++ local LAST_ERR=/tmp/tmp.lodOdWpynz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.mNGHpf78Dm ++ cat /tmp/tmp.lodOdWpynz ++ rm /tmp/tmp.mNGHpf78Dm /tmp/tmp.lodOdWpynz ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 55 -ge 120 ]] + echo -n . .+ sleep 1 + retry=56 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.y8imRZfIDb +++ mktemp ++ local LAST_ERR=/tmp/tmp.7XmWvxc33h ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.y8imRZfIDb ++ cat /tmp/tmp.7XmWvxc33h ++ rm /tmp/tmp.y8imRZfIDb /tmp/tmp.7XmWvxc33h ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 56 -ge 120 ]] + echo -n . 
.+ sleep 1 + retry=57 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ac0kdvk3lP +++ mktemp ++ local LAST_ERR=/tmp/tmp.kKSQWjywB7 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ac0kdvk3lP ++ cat /tmp/tmp.kKSQWjywB7 ++ rm /tmp/tmp.ac0kdvk3lP /tmp/tmp.kKSQWjywB7 ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 57 -ge 120 ]] + echo -n . .+ sleep 1 + retry=58 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WJ07BrtozW +++ mktemp ++ local LAST_ERR=/tmp/tmp.eVKSV8ZzkK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.WJ07BrtozW ++ cat /tmp/tmp.eVKSV8ZzkK ++ rm /tmp/tmp.WJ07BrtozW /tmp/tmp.eVKSV8ZzkK ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 58 -ge 120 ]] + echo -n . .+ sleep 1 + retry=59 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1qniXASzdX +++ mktemp ++ local LAST_ERR=/tmp/tmp.kGoESYbZeJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.1qniXASzdX ++ cat /tmp/tmp.kGoESYbZeJ ++ rm /tmp/tmp.1qniXASzdX /tmp/tmp.kGoESYbZeJ ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 59 -ge 120 ]] + echo -n . .+ sleep 1 + retry=60 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.76We3X9IE6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.OhefipJDn5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.76We3X9IE6 ++ cat /tmp/tmp.OhefipJDn5 ++ rm /tmp/tmp.76We3X9IE6 /tmp/tmp.OhefipJDn5 ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 60 -ge 120 ]] + echo -n . .+ sleep 1 + retry=61 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aBwHDQgRyU +++ mktemp ++ local LAST_ERR=/tmp/tmp.ypGrFNgzEV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.aBwHDQgRyU ++ cat /tmp/tmp.ypGrFNgzEV ++ rm /tmp/tmp.aBwHDQgRyU /tmp/tmp.ypGrFNgzEV ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 61 -ge 120 ]] + echo -n . 
.+ sleep 1 + retry=62 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Pj1EMHkLtP +++ mktemp ++ local LAST_ERR=/tmp/tmp.d3AUQo1X7E ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Pj1EMHkLtP ++ cat /tmp/tmp.d3AUQo1X7E ++ rm /tmp/tmp.Pj1EMHkLtP /tmp/tmp.d3AUQo1X7E ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 62 -ge 120 ]] + echo -n . .+ sleep 1 + retry=63 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.z13ETO1SgG +++ mktemp ++ local LAST_ERR=/tmp/tmp.Y74sM5IgTn ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.z13ETO1SgG ++ cat /tmp/tmp.Y74sM5IgTn ++ rm /tmp/tmp.z13ETO1SgG /tmp/tmp.Y74sM5IgTn ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 63 -ge 120 ]] + echo -n . .+ sleep 1 + retry=64 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FntZQ95Bi4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.2H51IRPGJs ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.FntZQ95Bi4 ++ cat /tmp/tmp.2H51IRPGJs ++ rm /tmp/tmp.FntZQ95Bi4 /tmp/tmp.2H51IRPGJs ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 64 -ge 120 ]] + echo -n . .+ sleep 1 + retry=65 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.s6Mon5Vb1T +++ mktemp ++ local LAST_ERR=/tmp/tmp.X1K9KmL2ME ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.s6Mon5Vb1T ++ cat /tmp/tmp.X1K9KmL2ME ++ rm /tmp/tmp.s6Mon5Vb1T /tmp/tmp.X1K9KmL2ME ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 65 -ge 120 ]] + echo -n . .+ sleep 1 + retry=66 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ocwbFrOfcG +++ mktemp ++ local LAST_ERR=/tmp/tmp.3IL3LkKYyo ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ocwbFrOfcG ++ cat /tmp/tmp.3IL3LkKYyo ++ rm /tmp/tmp.ocwbFrOfcG /tmp/tmp.3IL3LkKYyo ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 66 -ge 120 ]] + echo -n . 
.+ sleep 1 + retry=67 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5UmbQZQ827 +++ mktemp ++ local LAST_ERR=/tmp/tmp.kB0p1fbySb ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.5UmbQZQ827 ++ cat /tmp/tmp.kB0p1fbySb ++ rm /tmp/tmp.5UmbQZQ827 /tmp/tmp.kB0p1fbySb ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 67 -ge 120 ]] + echo -n . .+ sleep 1 + retry=68 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kuTOdILi7F +++ mktemp ++ local LAST_ERR=/tmp/tmp.fALka2fK9M ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.kuTOdILi7F ++ cat /tmp/tmp.fALka2fK9M ++ rm /tmp/tmp.kuTOdILi7F /tmp/tmp.fALka2fK9M ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 68 -ge 120 ]] + echo -n . .+ sleep 1 + retry=69 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EGvHotbBBj +++ mktemp ++ local LAST_ERR=/tmp/tmp.uOUZGlazEV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.EGvHotbBBj ++ cat /tmp/tmp.uOUZGlazEV ++ rm /tmp/tmp.EGvHotbBBj /tmp/tmp.uOUZGlazEV ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 69 -ge 120 ]] + echo -n . .+ sleep 1 + retry=70 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hHC7VJLb5L +++ mktemp ++ local LAST_ERR=/tmp/tmp.cobQiOgjEi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hHC7VJLb5L ++ cat /tmp/tmp.cobQiOgjEi ++ rm /tmp/tmp.hHC7VJLb5L /tmp/tmp.cobQiOgjEi ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 70 -ge 120 ]] + echo -n . .+ sleep 1 + retry=71 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QLmLSZrayv +++ mktemp ++ local LAST_ERR=/tmp/tmp.wi1dQECiPE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.QLmLSZrayv ++ cat /tmp/tmp.wi1dQECiPE ++ rm /tmp/tmp.QLmLSZrayv /tmp/tmp.wi1dQECiPE ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 71 -ge 120 ]] + echo -n . 
.+ sleep 1 + retry=72 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cTfIk6Bbm2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.kcipEJ7wWK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.cTfIk6Bbm2 ++ cat /tmp/tmp.kcipEJ7wWK ++ rm /tmp/tmp.cTfIk6Bbm2 /tmp/tmp.kcipEJ7wWK ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 72 -ge 120 ]] + echo -n . .+ sleep 1 + retry=73 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XL54NoMtMx +++ mktemp ++ local LAST_ERR=/tmp/tmp.PCGn5I4qq6 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.XL54NoMtMx ++ cat /tmp/tmp.PCGn5I4qq6 ++ rm /tmp/tmp.XL54NoMtMx /tmp/tmp.PCGn5I4qq6 ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 73 -ge 120 ]] + echo -n . .+ sleep 1 + retry=74 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eFzeG95x8B +++ mktemp ++ local LAST_ERR=/tmp/tmp.eOKgIkGkpt ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.eFzeG95x8B ++ cat /tmp/tmp.eOKgIkGkpt ++ rm /tmp/tmp.eFzeG95x8B /tmp/tmp.eOKgIkGkpt ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 74 -ge 120 ]] + echo -n . .+ sleep 1 + retry=75 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NN3aBBFZuY +++ mktemp ++ local LAST_ERR=/tmp/tmp.R3kdJoLI2U ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.NN3aBBFZuY ++ cat /tmp/tmp.R3kdJoLI2U ++ rm /tmp/tmp.NN3aBBFZuY /tmp/tmp.R3kdJoLI2U ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 75 -ge 120 ]] + echo -n . .+ sleep 1 + retry=76 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vAuSWYrZZi +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZZzPqRzOIR ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.vAuSWYrZZi ++ cat /tmp/tmp.ZZzPqRzOIR ++ rm /tmp/tmp.vAuSWYrZZi /tmp/tmp.ZZzPqRzOIR ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 76 -ge 120 ]] + echo -n . 
.+ sleep 1 + retry=77 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eT7F31qt1v +++ mktemp ++ local LAST_ERR=/tmp/tmp.uPQ1obEG35 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.eT7F31qt1v ++ cat /tmp/tmp.uPQ1obEG35 ++ rm /tmp/tmp.eT7F31qt1v /tmp/tmp.uPQ1obEG35 ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 77 -ge 120 ]] + echo -n . .+ sleep 1 + retry=78 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bk4aWwSYPz +++ mktemp ++ local LAST_ERR=/tmp/tmp.2cJCN3P8fq ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bk4aWwSYPz ++ cat /tmp/tmp.2cJCN3P8fq ++ rm /tmp/tmp.bk4aWwSYPz /tmp/tmp.2cJCN3P8fq ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 78 -ge 120 ]] + echo -n . .+ sleep 1 + retry=79 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CLNMIJs39X +++ mktemp ++ local LAST_ERR=/tmp/tmp.PMzUksQrW5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.CLNMIJs39X ++ cat /tmp/tmp.PMzUksQrW5 ++ rm /tmp/tmp.CLNMIJs39X /tmp/tmp.PMzUksQrW5 ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 79 -ge 120 ]] + echo -n . .+ sleep 1 + retry=80 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iBxPKCtIA5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.t2FCrkI6BX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.iBxPKCtIA5 ++ cat /tmp/tmp.t2FCrkI6BX ++ rm /tmp/tmp.iBxPKCtIA5 /tmp/tmp.t2FCrkI6BX ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 80 -ge 120 ]] + echo -n . .+ sleep 1 + retry=81 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qcqN96RyVR +++ mktemp ++ local LAST_ERR=/tmp/tmp.QDqJp5Sxqi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.qcqN96RyVR ++ cat /tmp/tmp.QDqJp5Sxqi ++ rm /tmp/tmp.qcqN96RyVR /tmp/tmp.QDqJp5Sxqi ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 81 -ge 120 ]] + echo -n . 
.+ sleep 1 + retry=82 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dBfOChuMfJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.SYRNDnLRKO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dBfOChuMfJ ++ cat /tmp/tmp.SYRNDnLRKO ++ rm /tmp/tmp.dBfOChuMfJ /tmp/tmp.SYRNDnLRKO ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 82 -ge 120 ]] + echo -n . .+ sleep 1 + retry=83 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vdWXL1k2my +++ mktemp ++ local LAST_ERR=/tmp/tmp.Wzib2tPkh4 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.vdWXL1k2my ++ cat /tmp/tmp.Wzib2tPkh4 ++ rm /tmp/tmp.vdWXL1k2my /tmp/tmp.Wzib2tPkh4 ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 83 -ge 120 ]] + echo -n . .+ sleep 1 + retry=84 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bphMmYqfUD +++ mktemp ++ local LAST_ERR=/tmp/tmp.YkNSW1Mhe3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bphMmYqfUD ++ cat /tmp/tmp.YkNSW1Mhe3 ++ rm /tmp/tmp.bphMmYqfUD /tmp/tmp.YkNSW1Mhe3 ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 84 -ge 120 ]] + echo -n . .+ sleep 1 + retry=85 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zjYh098J9g +++ mktemp ++ local LAST_ERR=/tmp/tmp.8D352mDPNf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.zjYh098J9g ++ cat /tmp/tmp.8D352mDPNf ++ rm /tmp/tmp.zjYh098J9g /tmp/tmp.8D352mDPNf ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 85 -ge 120 ]] + echo -n . .+ sleep 1 + retry=86 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9hKMqkcOj1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.hwTuW9ewIc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.9hKMqkcOj1 ++ cat /tmp/tmp.hwTuW9ewIc ++ rm /tmp/tmp.9hKMqkcOj1 /tmp/tmp.hwTuW9ewIc ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 86 -ge 120 ]] + echo -n . 
.+ sleep 1 + retry=87 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.D30F2EeVUH +++ mktemp ++ local LAST_ERR=/tmp/tmp.VKAjLyOsfR ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.D30F2EeVUH ++ cat /tmp/tmp.VKAjLyOsfR ++ rm /tmp/tmp.D30F2EeVUH /tmp/tmp.VKAjLyOsfR ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 87 -ge 120 ]] + echo -n . .+ sleep 1 + retry=88 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.E7qHaLO4Et +++ mktemp ++ local LAST_ERR=/tmp/tmp.UGAjC9oblv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.E7qHaLO4Et ++ cat /tmp/tmp.UGAjC9oblv ++ rm /tmp/tmp.E7qHaLO4Et /tmp/tmp.UGAjC9oblv ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 88 -ge 120 ]] + echo -n . .+ sleep 1 + retry=89 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.M9mzy9GhHb +++ mktemp ++ local LAST_ERR=/tmp/tmp.RFx2brBQui ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.M9mzy9GhHb ++ cat /tmp/tmp.RFx2brBQui ++ rm /tmp/tmp.M9mzy9GhHb /tmp/tmp.RFx2brBQui ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 89 -ge 120 ]] + echo -n . .+ sleep 1 + retry=90 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.R3GonSWfUG +++ mktemp ++ local LAST_ERR=/tmp/tmp.6eCE3JtlHG ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.R3GonSWfUG ++ cat /tmp/tmp.6eCE3JtlHG ++ rm /tmp/tmp.R3GonSWfUG /tmp/tmp.6eCE3JtlHG ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 90 -ge 120 ]] + echo -n . .+ sleep 1 + retry=91 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.R8BecAvFqP +++ mktemp ++ local LAST_ERR=/tmp/tmp.XVcNs4WRLJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.R8BecAvFqP ++ cat /tmp/tmp.XVcNs4WRLJ ++ rm /tmp/tmp.R8BecAvFqP /tmp/tmp.XVcNs4WRLJ ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 91 -ge 120 ]] + echo -n . 
.+ sleep 1 + retry=92 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xOJ56MKP2o +++ mktemp ++ local LAST_ERR=/tmp/tmp.EUrUKyFnHz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xOJ56MKP2o ++ cat /tmp/tmp.EUrUKyFnHz ++ rm /tmp/tmp.xOJ56MKP2o /tmp/tmp.EUrUKyFnHz ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 92 -ge 120 ]] + echo -n . .+ sleep 1 + retry=93 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QlcyODN6CI +++ mktemp ++ local LAST_ERR=/tmp/tmp.5MeBnQTkLS ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.QlcyODN6CI ++ cat /tmp/tmp.5MeBnQTkLS ++ rm /tmp/tmp.QlcyODN6CI /tmp/tmp.5MeBnQTkLS ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 93 -ge 120 ]] + echo -n . .+ sleep 1 + retry=94 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fi4OnbibQH +++ mktemp ++ local LAST_ERR=/tmp/tmp.nkO6YIaG3o ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.fi4OnbibQH ++ cat /tmp/tmp.nkO6YIaG3o ++ rm /tmp/tmp.fi4OnbibQH /tmp/tmp.nkO6YIaG3o ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 94 -ge 120 ]] + echo -n . .+ sleep 1 + retry=95 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sN7ua4t5uM +++ mktemp ++ local LAST_ERR=/tmp/tmp.oSs84Gezgo ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.sN7ua4t5uM ++ cat /tmp/tmp.oSs84Gezgo ++ rm /tmp/tmp.sN7ua4t5uM /tmp/tmp.oSs84Gezgo ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 95 -ge 120 ]] + echo -n . .+ sleep 1 + retry=96 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GCfMBcVRK8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.R8XoWv24Lv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.GCfMBcVRK8 ++ cat /tmp/tmp.R8XoWv24Lv ++ rm /tmp/tmp.GCfMBcVRK8 /tmp/tmp.R8XoWv24Lv ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 96 -ge 120 ]] + echo -n . 
.+ sleep 1 + retry=97 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xnPB3M4DPr +++ mktemp ++ local LAST_ERR=/tmp/tmp.ASQh7YNu3t ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xnPB3M4DPr ++ cat /tmp/tmp.ASQh7YNu3t ++ rm /tmp/tmp.xnPB3M4DPr /tmp/tmp.ASQh7YNu3t ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 97 -ge 120 ]] + echo -n . .+ sleep 1 + retry=98 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IJFWXx59FC +++ mktemp ++ local LAST_ERR=/tmp/tmp.470TiGK9EN ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.IJFWXx59FC ++ cat /tmp/tmp.470TiGK9EN ++ rm /tmp/tmp.IJFWXx59FC /tmp/tmp.470TiGK9EN ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 98 -ge 120 ]] + echo -n . .+ sleep 1 + retry=99 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.h41gXWgMyn +++ mktemp ++ local LAST_ERR=/tmp/tmp.KIOclZUutU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.h41gXWgMyn ++ cat /tmp/tmp.KIOclZUutU ++ rm /tmp/tmp.h41gXWgMyn /tmp/tmp.KIOclZUutU ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 99 -ge 120 ]] + echo -n . .+ sleep 1 + retry=100 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.50ry53vdGO +++ mktemp ++ local LAST_ERR=/tmp/tmp.hx5cxLQ1Qf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.50ry53vdGO ++ cat /tmp/tmp.hx5cxLQ1Qf ++ rm /tmp/tmp.50ry53vdGO /tmp/tmp.hx5cxLQ1Qf ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 100 -ge 120 ]] + echo -n . .+ sleep 1 + retry=101 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lok0joX9kh +++ mktemp ++ local LAST_ERR=/tmp/tmp.dCh5wy6sNk ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.lok0joX9kh ++ cat /tmp/tmp.dCh5wy6sNk ++ rm /tmp/tmp.lok0joX9kh /tmp/tmp.dCh5wy6sNk ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 101 -ge 120 ]] + echo -n . 
.+ sleep 1 + retry=102 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rT6QKc7kTW +++ mktemp ++ local LAST_ERR=/tmp/tmp.hdlvPjlpU9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.rT6QKc7kTW ++ cat /tmp/tmp.hdlvPjlpU9 ++ rm /tmp/tmp.rT6QKc7kTW /tmp/tmp.hdlvPjlpU9 ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 102 -ge 120 ]] + echo -n . .+ sleep 1 + retry=103 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZnNhi9hJhz +++ mktemp ++ local LAST_ERR=/tmp/tmp.eljOYR1Mn7 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ZnNhi9hJhz ++ cat /tmp/tmp.eljOYR1Mn7 ++ rm /tmp/tmp.ZnNhi9hJhz /tmp/tmp.eljOYR1Mn7 ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 103 -ge 120 ]] + echo -n . .+ sleep 1 + retry=104 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.F4ZVRw59VU +++ mktemp ++ local LAST_ERR=/tmp/tmp.32VNk78FZq ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.F4ZVRw59VU ++ cat /tmp/tmp.32VNk78FZq ++ rm /tmp/tmp.F4ZVRw59VU /tmp/tmp.32VNk78FZq ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 104 -ge 120 ]] + echo -n . .+ sleep 1 + retry=105 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3Ii2hgUUhr +++ mktemp ++ local LAST_ERR=/tmp/tmp.ol3SU1VhPG ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3Ii2hgUUhr ++ cat /tmp/tmp.ol3SU1VhPG ++ rm /tmp/tmp.3Ii2hgUUhr /tmp/tmp.ol3SU1VhPG ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 105 -ge 120 ]] + echo -n . .+ sleep 1 + retry=106 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TkQwGJW7fM +++ mktemp ++ local LAST_ERR=/tmp/tmp.Heh3PG4VNh ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.TkQwGJW7fM ++ cat /tmp/tmp.Heh3PG4VNh ++ rm /tmp/tmp.TkQwGJW7fM /tmp/tmp.Heh3PG4VNh ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 106 -ge 120 ]] + echo -n . 
.+ sleep 1 + retry=107 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FgSoad3N5V +++ mktemp ++ local LAST_ERR=/tmp/tmp.s6ssPWarga ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.FgSoad3N5V ++ cat /tmp/tmp.s6ssPWarga ++ rm /tmp/tmp.FgSoad3N5V /tmp/tmp.s6ssPWarga ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 107 -ge 120 ]] + echo -n . .+ sleep 1 + retry=108 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2JbFPi0jks +++ mktemp ++ local LAST_ERR=/tmp/tmp.fXNspcBoPp ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.2JbFPi0jks ++ cat /tmp/tmp.fXNspcBoPp ++ rm /tmp/tmp.2JbFPi0jks /tmp/tmp.fXNspcBoPp ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 108 -ge 120 ]] + echo -n . .+ sleep 1 + retry=109 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JRdCohSG9P +++ mktemp ++ local LAST_ERR=/tmp/tmp.80BCaKHNee ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JRdCohSG9P ++ cat /tmp/tmp.80BCaKHNee ++ rm /tmp/tmp.JRdCohSG9P /tmp/tmp.80BCaKHNee ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 109 -ge 120 ]] + echo -n . .+ sleep 1 + retry=110 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Nfq6xOekIB +++ mktemp ++ local LAST_ERR=/tmp/tmp.NgMXesfEv8 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Nfq6xOekIB ++ cat /tmp/tmp.NgMXesfEv8 ++ rm /tmp/tmp.Nfq6xOekIB /tmp/tmp.NgMXesfEv8 ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 110 -ge 120 ]] + echo -n . .+ sleep 1 + retry=111 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2tGnz8i2VJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.CHPhRej2nb ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.2tGnz8i2VJ ++ cat /tmp/tmp.CHPhRej2nb ++ rm /tmp/tmp.2tGnz8i2VJ /tmp/tmp.CHPhRej2nb ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 111 -ge 120 ]] + echo -n . 
.+ sleep 1 + retry=112 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Pej2FmaJmy +++ mktemp ++ local LAST_ERR=/tmp/tmp.vndEjc9LPs ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Pej2FmaJmy ++ cat /tmp/tmp.vndEjc9LPs ++ rm /tmp/tmp.Pej2FmaJmy /tmp/tmp.vndEjc9LPs ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 112 -ge 120 ]] + echo -n . .+ sleep 1 + retry=113 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IcYRh1mH3i +++ mktemp ++ local LAST_ERR=/tmp/tmp.Oiyms5Uc3m ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.IcYRh1mH3i ++ cat /tmp/tmp.Oiyms5Uc3m ++ rm /tmp/tmp.IcYRh1mH3i /tmp/tmp.Oiyms5Uc3m ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 113 -ge 120 ]] + echo -n . .+ sleep 1 + retry=114 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3Q1LfxjOCG +++ mktemp ++ local LAST_ERR=/tmp/tmp.S8LctwK9mw ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3Q1LfxjOCG ++ cat /tmp/tmp.S8LctwK9mw ++ rm /tmp/tmp.3Q1LfxjOCG /tmp/tmp.S8LctwK9mw ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 114 -ge 120 ]] + echo -n . .+ sleep 1 + retry=115 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w6uvcWX7Vc +++ mktemp ++ local LAST_ERR=/tmp/tmp.fow2gdU7Fj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.w6uvcWX7Vc ++ cat /tmp/tmp.fow2gdU7Fj ++ rm /tmp/tmp.w6uvcWX7Vc /tmp/tmp.fow2gdU7Fj ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 115 -ge 120 ]] + echo -n . .+ sleep 1 + retry=116 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LwOQ3BjRzC +++ mktemp ++ local LAST_ERR=/tmp/tmp.udSvwSV8sJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LwOQ3BjRzC ++ cat /tmp/tmp.udSvwSV8sJ ++ rm /tmp/tmp.LwOQ3BjRzC /tmp/tmp.udSvwSV8sJ ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 116 -ge 120 ]] + echo -n . 
.+ sleep 1 + retry=117 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ut5lnFIWgf +++ mktemp ++ local LAST_ERR=/tmp/tmp.uL6n8dmDHO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Ut5lnFIWgf ++ cat /tmp/tmp.uL6n8dmDHO ++ rm /tmp/tmp.Ut5lnFIWgf /tmp/tmp.uL6n8dmDHO ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 117 -ge 120 ]] + echo -n . .+ sleep 1 + retry=118 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JnNncnuqyU +++ mktemp ++ local LAST_ERR=/tmp/tmp.G1BE28tNJk ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JnNncnuqyU ++ cat /tmp/tmp.G1BE28tNJk ++ rm /tmp/tmp.JnNncnuqyU /tmp/tmp.G1BE28tNJk ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 118 -ge 120 ]] + echo -n . .+ sleep 1 + retry=119 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zXVf5lWaFQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.ivqjZykX7h ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.zXVf5lWaFQ ++ cat /tmp/tmp.ivqjZykX7h ++ rm /tmp/tmp.zXVf5lWaFQ /tmp/tmp.ivqjZykX7h ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 119 -ge 120 ]] + echo -n . 
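The near-identical blocks above (retry=102 through retry=119) are iterations of a single wait helper: it reads the PVC's reported capacity once per second, prints a dot while the value is still 1Gi, and gives up once the retry counter reaches 120. A minimal standalone sketch of that loop, assuming plain kubectl instead of the suite's kubectl_bin wrapper (which adds the mktemp/cat/rm bookkeeping seen in the trace); the retry cap and sleep defaults are read off the trace:

# Minimal sketch of the polling pattern traced above; kubectl_bin's own
# retry and tmp-file capture are omitted for brevity.
wait_pvc_capacity() {
    local pvc="$1" expected="$2" max_retry="${3:-120}" sleep_time="${4:-1}"
    local retry=0
    while true; do
        local current
        current=$(kubectl get "$pvc" -o jsonpath='{.status.capacity.storage}')
        if [[ "$current" == "$expected" ]]; then
            echo
            echo "$pvc was resized"
            return 0
        fi
        if (( retry >= max_retry )); then
            echo
            echo "$pvc was not resized, max retries exceeded"
            return 1
        fi
        echo -n .
        sleep "$sleep_time"
        retry=$((retry + 1))
    done
}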
.+ sleep 1 + retry=120 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XJuflpOomb +++ mktemp ++ local LAST_ERR=/tmp/tmp.ycwptZj6XP ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.XJuflpOomb ++ cat /tmp/tmp.ycwptZj6XP ++ rm /tmp/tmp.XJuflpOomb /tmp/tmp.ycwptZj6XP ++ return 0 + [[ 1Gi == \2\G\i ]] + [[ 120 -ge 120 ]] + echo + echo 'persistentvolumeclaim/mongod-data-some-name-rs0-0 was not resized, max retries exceeded' persistentvolumeclaim/mongod-data-some-name-rs0-0 was not resized, max retries exceeded + return 1 + return 1 + echo 'Enabling PVC resize' Enabling PVC resize + kubectl_bin patch psmdb some-name --type=json '-p=[{"op": "add", "path": "/spec/enableVolumeExpansion", "value":true }]' ++ mktemp + local LAST_OUT=/tmp/tmp.BFs0zBrnSJ ++ mktemp + local LAST_ERR=/tmp/tmp.dxDugYaYbP + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch psmdb some-name --type=json '-p=[{"op": "add", "path": "/spec/enableVolumeExpansion", "value":true }]' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.BFs0zBrnSJ perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.dxDugYaYbP + rm /tmp/tmp.BFs0zBrnSJ /tmp/tmp.dxDugYaYbP + return 0 + sleep 10 + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jWwDXUuJWR +++ mktemp ++ local LAST_ERR=/tmp/tmp.xaZDdMdEe8 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.jWwDXUuJWR ++ cat /tmp/tmp.xaZDdMdEe8 ++ rm /tmp/tmp.jWwDXUuJWR /tmp/tmp.xaZDdMdEe8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8a0y5y8dJv +++ mktemp ++ local LAST_ERR=/tmp/tmp.zunTnsHc6x ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8a0y5y8dJv ++ cat /tmp/tmp.zunTnsHc6x ++ rm /tmp/tmp.8a0y5y8dJv /tmp/tmp.zunTnsHc6x ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rAGhvKaAtW +++ mktemp ++ local LAST_ERR=/tmp/tmp.JL7GYD2KxO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.rAGhvKaAtW ++ cat /tmp/tmp.JL7GYD2KxO ++ rm /tmp/tmp.rAGhvKaAtW /tmp/tmp.JL7GYD2KxO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . 
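The resize above stalls because the custom resource still has spec.enableVolumeExpansion off, so the operator never grows the PVCs; once the JSON patch flips it to true, the test waits for the psmdb resource to report ready again. The readiness wait above and below is the same bounded-poll idea, this time against .status.state with a 10-second sleep and, here, a 32-iteration cap. A sketch under the same plain-kubectl assumption; the failure message is illustrative only:

# Sketch of the wait_cluster_consistency pattern in this trace: poll the
# CR's .status.state until it reads "ready" or the retry budget runs out.
wait_cluster_consistency() {
    local cluster_name="$1" wait_time="${2:-32}" retry=0
    sleep 7
    echo -n 'waiting for cluster readiness'
    until [[ "$(kubectl get psmdb "$cluster_name" -o jsonpath='{.status.state}')" == "ready" ]]; do
        if (( ++retry >= wait_time )); then
            echo "cluster ${cluster_name} did not become ready in time"
            return 1
        fi
        echo -n .
        sleep 10
    done
    echo
}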
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.c4K80q8AcO +++ mktemp ++ local LAST_ERR=/tmp/tmp.xGsiyUnIT5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.c4K80q8AcO ++ cat /tmp/tmp.xGsiyUnIT5 ++ rm /tmp/tmp.c4K80q8AcO /tmp/tmp.xGsiyUnIT5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lYUVbhUaZL +++ mktemp ++ local LAST_ERR=/tmp/tmp.Yv8YmtGbXj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.lYUVbhUaZL ++ cat /tmp/tmp.Yv8YmtGbXj ++ rm /tmp/tmp.lYUVbhUaZL /tmp/tmp.Yv8YmtGbXj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9ewjuGGDVd +++ mktemp ++ local LAST_ERR=/tmp/tmp.FD3MjlLS82 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.9ewjuGGDVd ++ cat /tmp/tmp.FD3MjlLS82 ++ rm /tmp/tmp.9ewjuGGDVd /tmp/tmp.FD3MjlLS82 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MG0qObgOfR +++ mktemp ++ local LAST_ERR=/tmp/tmp.DCt6HexhRL ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.MG0qObgOfR ++ cat /tmp/tmp.DCt6HexhRL ++ rm /tmp/tmp.MG0qObgOfR /tmp/tmp.DCt6HexhRL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4w7Q7rrThj +++ mktemp ++ local LAST_ERR=/tmp/tmp.ySIgUAKqnw ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.4w7Q7rrThj ++ cat /tmp/tmp.ySIgUAKqnw ++ rm /tmp/tmp.4w7Q7rrThj /tmp/tmp.ySIgUAKqnw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 8 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bOHDPn0Ldg +++ mktemp ++ local LAST_ERR=/tmp/tmp.jkpvGm9uCg ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bOHDPn0Ldg ++ cat /tmp/tmp.jkpvGm9uCg ++ rm /tmp/tmp.bOHDPn0Ldg /tmp/tmp.jkpvGm9uCg ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + wait_all_pvc_resize 2Gi + local expected_size=2Gi + local max_retry=120 + local sleep_time=5 ++ kubectl_bin get pvc -l app.kubernetes.io/component=mongod -o name +++ mktemp ++ local LAST_OUT=/tmp/tmp.SOCnraiSwL +++ mktemp ++ local LAST_ERR=/tmp/tmp.Vxg0Qc7VA5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pvc -l app.kubernetes.io/component=mongod -o name ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.SOCnraiSwL ++ cat /tmp/tmp.Vxg0Qc7VA5 ++ rm /tmp/tmp.SOCnraiSwL /tmp/tmp.Vxg0Qc7VA5 ++ return 0 + for pvc in $(kubectl_bin get pvc -l app.kubernetes.io/component=mongod -o name) + wait_pvc_resize persistentvolumeclaim/mongod-data-some-name-rs0-0 2Gi 120 5 + local pvc=persistentvolumeclaim/mongod-data-some-name-rs0-0 + local expected_size=2Gi + local max_retry=120 + local sleep_time=5 + local retry=0 + echo 'Waiting for persistentvolumeclaim/mongod-data-some-name-rs0-0 to be resized' Waiting for persistentvolumeclaim/mongod-data-some-name-rs0-0 to be resized ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZFMIrzagRC +++ mktemp ++ local LAST_ERR=/tmp/tmp.3oz5ctg5gY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ZFMIrzagRC ++ cat /tmp/tmp.3oz5ctg5gY ++ rm /tmp/tmp.ZFMIrzagRC /tmp/tmp.3oz5ctg5gY ++ return 0 + [[ 2Gi == \2\G\i ]] + echo + echo 'persistentvolumeclaim/mongod-data-some-name-rs0-0 was resized' persistentvolumeclaim/mongod-data-some-name-rs0-0 was resized + return 0 + for pvc in $(kubectl_bin get pvc -l app.kubernetes.io/component=mongod -o name) + wait_pvc_resize persistentvolumeclaim/mongod-data-some-name-rs0-1 2Gi 120 5 + local pvc=persistentvolumeclaim/mongod-data-some-name-rs0-1 + local expected_size=2Gi + local max_retry=120 + local sleep_time=5 + local retry=0 + echo 'Waiting for persistentvolumeclaim/mongod-data-some-name-rs0-1 to be resized' Waiting for persistentvolumeclaim/mongod-data-some-name-rs0-1 to be resized ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-1 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qCahi88M20 +++ mktemp ++ local LAST_ERR=/tmp/tmp.3dt0hSu5qv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-1 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.qCahi88M20 ++ cat /tmp/tmp.3dt0hSu5qv ++ rm /tmp/tmp.qCahi88M20 /tmp/tmp.3dt0hSu5qv ++ return 0 + [[ 2Gi == \2\G\i ]] + echo + echo 'persistentvolumeclaim/mongod-data-some-name-rs0-1 was resized' persistentvolumeclaim/mongod-data-some-name-rs0-1 was resized + 
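With the cluster ready, wait_all_pvc_resize simply enumerates every data PVC by its component label and applies the same capacity wait to each one, as the per-PVC blocks above and below show. A compact sketch of that outer loop, reusing the wait_pvc_capacity sketch from earlier; the label selector and the 120/5 defaults are taken from the trace:

# Outer loop of wait_all_pvc_resize: one capacity wait per mongod data PVC.
wait_all_pvc_resize() {
    local expected_size="$1" max_retry="${2:-120}" sleep_time="${3:-5}"
    local pvc
    for pvc in $(kubectl get pvc -l app.kubernetes.io/component=mongod -o name); do
        wait_pvc_capacity "$pvc" "$expected_size" "$max_retry" "$sleep_time" || return 1
    done
    return 0
}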
return 0 + for pvc in $(kubectl_bin get pvc -l app.kubernetes.io/component=mongod -o name) + wait_pvc_resize persistentvolumeclaim/mongod-data-some-name-rs0-2 2Gi 120 5 + local pvc=persistentvolumeclaim/mongod-data-some-name-rs0-2 + local expected_size=2Gi + local max_retry=120 + local sleep_time=5 + local retry=0 + echo 'Waiting for persistentvolumeclaim/mongod-data-some-name-rs0-2 to be resized' Waiting for persistentvolumeclaim/mongod-data-some-name-rs0-2 to be resized ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-2 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Fr7UKFKlSh +++ mktemp ++ local LAST_ERR=/tmp/tmp.qUUzS7ITpO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-2 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Fr7UKFKlSh ++ cat /tmp/tmp.qUUzS7ITpO ++ rm /tmp/tmp.Fr7UKFKlSh /tmp/tmp.qUUzS7ITpO ++ return 0 + [[ 2Gi == \2\G\i ]] + echo + echo 'persistentvolumeclaim/mongod-data-some-name-rs0-2 was resized' persistentvolumeclaim/mongod-data-some-name-rs0-2 was resized + return 0 + return 0 + echo + [[ 0 == 1 ]] + [[ -n '' ]] + desc 'create resourcequota' + set +o xtrace ----------------------------------------------------------------------------------- create resourcequota ----------------------------------------------------------------------------------- + apply_resourcequota 7Gi + local quota=7Gi + local default_sc ++ get_default_storageclass ++ kubectl_bin get sc -o 'jsonpath={.items[?(@.metadata.annotations.storageclass\.kubernetes\.io/is-default-class=="true")].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ocAffVKSj2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.6W6ZR1Qwbk ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get sc -o 'jsonpath={.items[?(@.metadata.annotations.storageclass\.kubernetes\.io/is-default-class=="true")].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ocAffVKSj2 ++ cat /tmp/tmp.6W6ZR1Qwbk ++ rm /tmp/tmp.ocAffVKSj2 /tmp/tmp.6W6ZR1Qwbk ++ return 0 + default_sc=standard-rwo + echo 'Applying resourcequota for default storageclass standard-rwo with quota 7Gi' Applying resourcequota for default storageclass standard-rwo with quota 7Gi + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/e2e-tests/pvc-resize/conf/resourcequota.yml + sed s/STORAGECLASS/standard-rwo/ + sed s/QUOTA/7Gi/ + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.dgoE7ti2Gw ++ mktemp + local LAST_ERR=/tmp/tmp.RZBwONTg1e + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.dgoE7ti2Gw resourcequota/default-storage-quota created + cat /tmp/tmp.RZBwONTg1e + rm /tmp/tmp.dgoE7ti2Gw /tmp/tmp.RZBwONTg1e + return 0 + patch_pvc_request some-name 3G + local cluster=some-name + local size=3G + echo 'Patching PVC request to 3G in some-name' Patching PVC request to 3G in some-name + kubectl_bin patch psmdb some-name --type=json '-p=[{"op": "replace", "path": "/spec/replsets/0/volumeSpec/persistentVolumeClaim/resources/requests/storage", "value":"3G"}]' ++ mktemp + local LAST_OUT=/tmp/tmp.FHHRDMEkFX ++ mktemp + local LAST_ERR=/tmp/tmp.SoyZwhsZj9 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch psmdb 
some-name --type=json '-p=[{"op": "replace", "path": "/spec/replsets/0/volumeSpec/persistentVolumeClaim/resources/requests/storage", "value":"3G"}]' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.FHHRDMEkFX perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.SoyZwhsZj9 + rm /tmp/tmp.FHHRDMEkFX /tmp/tmp.SoyZwhsZj9 + return 0 + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ps6DaxvwvZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.OfbKFJ8IFd ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Ps6DaxvwvZ ++ cat /tmp/tmp.OfbKFJ8IFd ++ rm /tmp/tmp.Ps6DaxvwvZ /tmp/tmp.OfbKFJ8IFd ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + echo + wait_pvc_resize persistentvolumeclaim/mongod-data-some-name-rs0-0 3Gi + local pvc=persistentvolumeclaim/mongod-data-some-name-rs0-0 + local expected_size=3Gi + local max_retry=120 + local sleep_time=5 + local retry=0 + echo 'Waiting for persistentvolumeclaim/mongod-data-some-name-rs0-0 to be resized' Waiting for persistentvolumeclaim/mongod-data-some-name-rs0-0 to be resized ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.23vKbRNckx +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZW9Ksw3no6 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.23vKbRNckx ++ cat /tmp/tmp.ZW9Ksw3no6 ++ rm /tmp/tmp.23vKbRNckx /tmp/tmp.ZW9Ksw3no6 ++ return 0 + [[ 2Gi == \3\G\i ]] + [[ 0 -ge 120 ]] + echo -n . 
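The resourcequota step above renders e2e-tests/pvc-resize/conf/resourcequota.yml by sed-substituting STORAGECLASS and QUOTA and applies it, capping how much storage the namespace may request from the default storage class. The template itself is not reproduced in the log; a plausible equivalent of the rendered manifest, assuming it uses Kubernetes' per-storage-class quota key and the default-storage-quota name seen in the apply output (the real template may differ):

# Hypothetical reconstruction of the rendered manifest for quota=7Gi on
# storage class standard-rwo; shown here as a heredoc apply.
cat <<EOF | kubectl apply -f -
apiVersion: v1
kind: ResourceQuota
metadata:
  name: default-storage-quota
spec:
  hard:
    standard-rwo.storageclass.storage.k8s.io/requests.storage: 7Gi
EOF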
.+ sleep 5 + retry=1 ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3I3Pk0Et5v +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ic7ftIVfpl ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3I3Pk0Et5v ++ cat /tmp/tmp.Ic7ftIVfpl ++ rm /tmp/tmp.3I3Pk0Et5v /tmp/tmp.Ic7ftIVfpl ++ return 0 + [[ 3Gi == \3\G\i ]] + echo + echo 'persistentvolumeclaim/mongod-data-some-name-rs0-0 was resized' persistentvolumeclaim/mongod-data-some-name-rs0-0 was resized + return 0 + apply_resourcequota 9Gi + local quota=9Gi + local default_sc ++ get_default_storageclass ++ kubectl_bin get sc -o 'jsonpath={.items[?(@.metadata.annotations.storageclass\.kubernetes\.io/is-default-class=="true")].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DYdNf8dGVd +++ mktemp ++ local LAST_ERR=/tmp/tmp.VeWK0ZIzq2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get sc -o 'jsonpath={.items[?(@.metadata.annotations.storageclass\.kubernetes\.io/is-default-class=="true")].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.DYdNf8dGVd ++ cat /tmp/tmp.VeWK0ZIzq2 ++ rm /tmp/tmp.DYdNf8dGVd /tmp/tmp.VeWK0ZIzq2 ++ return 0 + default_sc=standard-rwo + echo 'Applying resourcequota for default storageclass standard-rwo with quota 9Gi' Applying resourcequota for default storageclass standard-rwo with quota 9Gi + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/e2e-tests/pvc-resize/conf/resourcequota.yml + sed s/STORAGECLASS/standard-rwo/ + sed s/QUOTA/9Gi/ + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.oDZXxTaXQ7 ++ mktemp + local LAST_ERR=/tmp/tmp.VjsqIlgI9C + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.oDZXxTaXQ7 resourcequota/default-storage-quota configured + cat /tmp/tmp.VjsqIlgI9C + rm /tmp/tmp.oDZXxTaXQ7 /tmp/tmp.VjsqIlgI9C + return 0 + patch_pvc_request some-name 3G + local cluster=some-name + local size=3G + echo 'Patching PVC request to 3G in some-name' Patching PVC request to 3G in some-name + kubectl_bin patch psmdb some-name --type=json '-p=[{"op": "replace", "path": "/spec/replsets/0/volumeSpec/persistentVolumeClaim/resources/requests/storage", "value":"3G"}]' ++ mktemp + local LAST_OUT=/tmp/tmp.JIclYoB72e ++ mktemp + local LAST_ERR=/tmp/tmp.kOcJH6OltB + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch psmdb some-name --type=json '-p=[{"op": "replace", "path": "/spec/replsets/0/volumeSpec/persistentVolumeClaim/resources/requests/storage", "value":"3G"}]' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.JIclYoB72e perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.kOcJH6OltB + rm /tmp/tmp.JIclYoB72e /tmp/tmp.kOcJH6OltB + return 0 + wait_cluster_consistency some-name 42 + local cluster_name=some-name + local wait_time=42 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nisDu3AG0Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.gOaCPg6VgO ++ local 
exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.nisDu3AG0Y ++ cat /tmp/tmp.gOaCPg6VgO ++ rm /tmp/tmp.nisDu3AG0Y /tmp/tmp.gOaCPg6VgO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 42 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wgCnaVv8sB +++ mktemp ++ local LAST_ERR=/tmp/tmp.QSYRF8IHCe ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.wgCnaVv8sB ++ cat /tmp/tmp.QSYRF8IHCe ++ rm /tmp/tmp.wgCnaVv8sB /tmp/tmp.QSYRF8IHCe ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 42 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RUjTiUoC1K +++ mktemp ++ local LAST_ERR=/tmp/tmp.ka1q3gc99w ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.RUjTiUoC1K ++ cat /tmp/tmp.ka1q3gc99w ++ rm /tmp/tmp.RUjTiUoC1K /tmp/tmp.ka1q3gc99w ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 42 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0rFlo8ew4i +++ mktemp ++ local LAST_ERR=/tmp/tmp.JiF5jsVbud ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.0rFlo8ew4i ++ cat /tmp/tmp.JiF5jsVbud ++ rm /tmp/tmp.0rFlo8ew4i /tmp/tmp.JiF5jsVbud ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 42 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OcnIrd9i76 +++ mktemp ++ local LAST_ERR=/tmp/tmp.8DBSnQNao3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.OcnIrd9i76 ++ cat /tmp/tmp.8DBSnQNao3 ++ rm /tmp/tmp.OcnIrd9i76 /tmp/tmp.8DBSnQNao3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 42 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6ZpPrvVi2X +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZnpoO8Jx0D ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.6ZpPrvVi2X ++ cat /tmp/tmp.ZnpoO8Jx0D ++ rm /tmp/tmp.6ZpPrvVi2X /tmp/tmp.ZnpoO8Jx0D ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 42 ']' + echo -n . 
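A small unit detail explains why the patch requests "3G" while the capacity checks above and below compare against "3Gi": the CR spec uses a decimal (SI) gigabyte quantity, the provisioner here allocates in whole GiB, and 3G rounds up to 3Gi. Assuming GNU coreutils is available, numfmt shows the two values:

# 3G (decimal SI) is smaller than 3Gi (binary IEC); the volume is therefore
# provisioned at the next whole GiB, which the PVC reports as 3Gi.
numfmt --from=si 3G      # 3000000000 bytes
numfmt --from=iec-i 3Gi  # 3221225472 bytes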
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nbEoYNQWGK +++ mktemp ++ local LAST_ERR=/tmp/tmp.hk42CkRKGd ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.nbEoYNQWGK ++ cat /tmp/tmp.hk42CkRKGd ++ rm /tmp/tmp.nbEoYNQWGK /tmp/tmp.hk42CkRKGd ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 42 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RrkuCi6HuZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.QmeGwB8y9A ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.RrkuCi6HuZ ++ cat /tmp/tmp.QmeGwB8y9A ++ rm /tmp/tmp.RrkuCi6HuZ /tmp/tmp.QmeGwB8y9A ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 8 -ge 42 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Xqkybi1dir +++ mktemp ++ local LAST_ERR=/tmp/tmp.epahnPeUv4 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Xqkybi1dir ++ cat /tmp/tmp.epahnPeUv4 ++ rm /tmp/tmp.Xqkybi1dir /tmp/tmp.epahnPeUv4 ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + echo + wait_all_pvc_resize 3Gi + local expected_size=3Gi + local max_retry=120 + local sleep_time=5 ++ kubectl_bin get pvc -l app.kubernetes.io/component=mongod -o name +++ mktemp ++ local LAST_OUT=/tmp/tmp.G8GwE2jod8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.PqUKe4dNfn ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pvc -l app.kubernetes.io/component=mongod -o name ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.G8GwE2jod8 ++ cat /tmp/tmp.PqUKe4dNfn ++ rm /tmp/tmp.G8GwE2jod8 /tmp/tmp.PqUKe4dNfn ++ return 0 + for pvc in $(kubectl_bin get pvc -l app.kubernetes.io/component=mongod -o name) + wait_pvc_resize persistentvolumeclaim/mongod-data-some-name-rs0-0 3Gi 120 5 + local pvc=persistentvolumeclaim/mongod-data-some-name-rs0-0 + local expected_size=3Gi + local max_retry=120 + local sleep_time=5 + local retry=0 + echo 'Waiting for persistentvolumeclaim/mongod-data-some-name-rs0-0 to be resized' Waiting for persistentvolumeclaim/mongod-data-some-name-rs0-0 to be resized ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hcV4Hewtns +++ mktemp ++ local LAST_ERR=/tmp/tmp.l9RjyhW2sV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-0 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hcV4Hewtns ++ cat /tmp/tmp.l9RjyhW2sV ++ rm /tmp/tmp.hcV4Hewtns /tmp/tmp.l9RjyhW2sV ++ return 0 + [[ 3Gi == \3\G\i ]] + echo + echo 'persistentvolumeclaim/mongod-data-some-name-rs0-0 was resized' persistentvolumeclaim/mongod-data-some-name-rs0-0 was resized + return 0 + for pvc in $(kubectl_bin get pvc -l 
app.kubernetes.io/component=mongod -o name) + wait_pvc_resize persistentvolumeclaim/mongod-data-some-name-rs0-1 3Gi 120 5 + local pvc=persistentvolumeclaim/mongod-data-some-name-rs0-1 + local expected_size=3Gi + local max_retry=120 + local sleep_time=5 + local retry=0 + echo 'Waiting for persistentvolumeclaim/mongod-data-some-name-rs0-1 to be resized' Waiting for persistentvolumeclaim/mongod-data-some-name-rs0-1 to be resized ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-1 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JK7bW64IqG +++ mktemp ++ local LAST_ERR=/tmp/tmp.wxB2YhE2o8 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-1 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JK7bW64IqG ++ cat /tmp/tmp.wxB2YhE2o8 ++ rm /tmp/tmp.JK7bW64IqG /tmp/tmp.wxB2YhE2o8 ++ return 0 + [[ 3Gi == \3\G\i ]] + echo + echo 'persistentvolumeclaim/mongod-data-some-name-rs0-1 was resized' persistentvolumeclaim/mongod-data-some-name-rs0-1 was resized + return 0 + for pvc in $(kubectl_bin get pvc -l app.kubernetes.io/component=mongod -o name) + wait_pvc_resize persistentvolumeclaim/mongod-data-some-name-rs0-2 3Gi 120 5 + local pvc=persistentvolumeclaim/mongod-data-some-name-rs0-2 + local expected_size=3Gi + local max_retry=120 + local sleep_time=5 + local retry=0 + echo 'Waiting for persistentvolumeclaim/mongod-data-some-name-rs0-2 to be resized' Waiting for persistentvolumeclaim/mongod-data-some-name-rs0-2 to be resized ++ kubectl_bin get persistentvolumeclaim/mongod-data-some-name-rs0-2 -o 'jsonpath={.status.capacity.storage}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JsEM6N256m +++ mktemp ++ local LAST_ERR=/tmp/tmp.gSrzR9T8Fl ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get persistentvolumeclaim/mongod-data-some-name-rs0-2 -o 'jsonpath={.status.capacity.storage}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JsEM6N256m ++ cat /tmp/tmp.gSrzR9T8Fl ++ rm /tmp/tmp.JsEM6N256m /tmp/tmp.gSrzR9T8Fl ++ return 0 + [[ 3Gi == \3\G\i ]] + echo + echo 'persistentvolumeclaim/mongod-data-some-name-rs0-2 was resized' persistentvolumeclaim/mongod-data-some-name-rs0-2 was resized + return 0 + return 0 + desc 'test downscale' + set +o xtrace ----------------------------------------------------------------------------------- test downscale ----------------------------------------------------------------------------------- + patch_pvc_request some-name 1G + local cluster=some-name + local size=1G + echo 'Patching PVC request to 1G in some-name' Patching PVC request to 1G in some-name + kubectl_bin patch psmdb some-name --type=json '-p=[{"op": "replace", "path": "/spec/replsets/0/volumeSpec/persistentVolumeClaim/resources/requests/storage", "value":"1G"}]' ++ mktemp + local LAST_OUT=/tmp/tmp.8FI8S8A02f ++ mktemp + local LAST_ERR=/tmp/tmp.bcEzMTBq2I + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch psmdb some-name --type=json '-p=[{"op": "replace", "path": "/spec/replsets/0/volumeSpec/persistentVolumeClaim/resources/requests/storage", "value":"1G"}]' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.8FI8S8A02f perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.bcEzMTBq2I + rm /tmp/tmp.8FI8S8A02f /tmp/tmp.bcEzMTBq2I + return 0 + 
wait_cluster_status some-name error + local cluster=some-name + local expected=error + echo -n 'Waiting for psmdb/some-name status to be error' Waiting for psmdb/some-name status to be error++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DBeEFLlFDN +++ mktemp ++ local LAST_ERR=/tmp/tmp.Mzvi06hGQW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.DBeEFLlFDN ++ cat /tmp/tmp.Mzvi06hGQW ++ rm /tmp/tmp.DBeEFLlFDN /tmp/tmp.Mzvi06hGQW ++ return 0 + [[ error == \e\r\r\o\r ]] + echo + echo 'psmdb/some-name status is error' psmdb/some-name status is error + patch_pvc_request some-name 3G + local cluster=some-name + local size=3G + echo 'Patching PVC request to 3G in some-name' Patching PVC request to 3G in some-name + kubectl_bin patch psmdb some-name --type=json '-p=[{"op": "replace", "path": "/spec/replsets/0/volumeSpec/persistentVolumeClaim/resources/requests/storage", "value":"3G"}]' ++ mktemp + local LAST_OUT=/tmp/tmp.ofJh8ZFdas ++ mktemp + local LAST_ERR=/tmp/tmp.N6gCQGvz1y + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch psmdb some-name --type=json '-p=[{"op": "replace", "path": "/spec/replsets/0/volumeSpec/persistentVolumeClaim/resources/requests/storage", "value":"3G"}]' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ofJh8ZFdas perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.N6gCQGvz1y + rm /tmp/tmp.ofJh8ZFdas /tmp/tmp.N6gCQGvz1y + return 0 + wait_cluster_status some-name ready + local cluster=some-name + local expected=ready + echo -n 'Waiting for psmdb/some-name status to be ready' Waiting for psmdb/some-name status to be ready++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SnViZR1SVK +++ mktemp ++ local LAST_ERR=/tmp/tmp.A2Z6NeCVNf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.SnViZR1SVK ++ cat /tmp/tmp.A2Z6NeCVNf ++ rm /tmp/tmp.SnViZR1SVK /tmp/tmp.A2Z6NeCVNf ++ return 0 + [[ error == \r\e\a\d\y ]] + [[ 8 -ge 60 ]] + echo -n . 
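The downscale step works as a negative test: Kubernetes only permits PersistentVolumeClaims to grow, so asking the operator to shrink the volume request from 3G to 1G drives the psmdb resource into the error state seen above, and restoring the request to 3G lets it recover to ready. The status wait used for both transitions is another bounded poll; a sketch, with the 5-second sleep and 60-iteration cap read off the trace and an illustrative failure message:

# Sketch of wait_cluster_status: poll .status.state until it matches the
# expected value ("error" after the invalid shrink, "ready" after recovery).
wait_cluster_status() {
    local cluster="$1" expected="$2" retry=0
    echo -n "Waiting for psmdb/${cluster} status to be ${expected}"
    until [[ "$(kubectl get psmdb "$cluster" -o jsonpath='{.status.state}')" == "$expected" ]]; do
        if (( retry >= 60 )); then
            echo
            echo "psmdb/${cluster} did not reach the ${expected} state"
            return 1
        fi
        echo -n .
        sleep 5
        retry=$((retry + 1))
    done
    echo
    echo "psmdb/${cluster} status is ${expected}"
}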
.+ sleep 5 + retry=9 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mpTwnO1NxR +++ mktemp ++ local LAST_ERR=/tmp/tmp.ntJccEiDt0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.mpTwnO1NxR ++ cat /tmp/tmp.ntJccEiDt0 ++ rm /tmp/tmp.mpTwnO1NxR /tmp/tmp.ntJccEiDt0 ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + echo 'psmdb/some-name status is ready' psmdb/some-name status is ready + destroy pvc-resize-18358 + local namespace=pvc-resize-18358 + local ignore_logs=true + [[ 0 == 1 ]] + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false ']' + delete_backups + desc 'Delete psmdb-backup' + set +o xtrace ----------------------------------------------------------------------------------- Delete psmdb-backup ----------------------------------------------------------------------------------- ++ kubectl_bin get psmdb-backup --no-headers ++ wc -l +++ mktemp ++ local LAST_OUT=/tmp/tmp.m6q7qpbFv2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.uq8zC98OFz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup --no-headers ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.m6q7qpbFv2 ++ cat /tmp/tmp.uq8zC98OFz No resources found in pvc-resize-18358 namespace. ++ rm /tmp/tmp.m6q7qpbFv2 /tmp/tmp.uq8zC98OFz ++ return 0 + '[' 0 '!=' 0 ']' + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.YwUq5VggZ1 ++ mktemp + local LAST_ERR=/tmp/tmp.I2YPWZoBC6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.YwUq5VggZ1 customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.I2YPWZoBC6 + rm /tmp/tmp.YwUq5VggZ1 /tmp/tmp.I2YPWZoBC6 + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/deploy/crd.yaml ++ grep -v '\-\-\-' grep: warning: stray \ before - grep: warning: stray \ before - + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type 
"perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.uGUjK3zL3M ++ mktemp + local LAST_ERR=/tmp/tmp.30Rj0ZEAir + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.uGUjK3zL3M + cat /tmp/tmp.30Rj0ZEAir + rm /tmp/tmp.uGUjK3zL3M /tmp/tmp.30Rj0ZEAir + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.8sawgMdDOD ++ mktemp + local LAST_ERR=/tmp/tmp.waXfwiZ95t + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.8sawgMdDOD + cat /tmp/tmp.waXfwiZ95t + rm /tmp/tmp.8sawgMdDOD /tmp/tmp.waXfwiZ95t + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch perconaservermongodbs.psmdb.percona.com -n pvc-resize-18358 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaservermongodb.psmdb.percona.com/some-name patched + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.2d0Jq3pJwM ++ mktemp + local LAST_ERR=/tmp/tmp.sB33Yu8zuj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.2d0Jq3pJwM customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com condition met + cat /tmp/tmp.sB33Yu8zuj + rm /tmp/tmp.2d0Jq3pJwM /tmp/tmp.sB33Yu8zuj + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.6HiCSQY9m0 ++ mktemp + local LAST_ERR=/tmp/tmp.2PMzhfpOGm + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1987/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6HiCSQY9m0 
clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.2PMzhfpOGm + rm /tmp/tmp.6HiCSQY9m0 /tmp/tmp.2PMzhfpOGm + return 0 + destroy_cert_manager + kubectl_bin delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.cj1CgNmr18 ++ mktemp + local LAST_ERR=/tmp/tmp.jZpQJyg4za + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.cj1CgNmr18 + cat /tmp/tmp.jZpQJyg4za Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io 
"cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.cj1CgNmr18 + cat /tmp/tmp.jZpQJyg4za Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 4 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.cj1CgNmr18 + cat /tmp/tmp.jZpQJyg4za Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 8 + cat /tmp/tmp.cj1CgNmr18 + cat /tmp/tmp.jZpQJyg4za Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": 
+ rm /tmp/tmp.cj1CgNmr18 /tmp/tmp.jZpQJyg4za
+ return 1
+ true
+ '[' -n '' ']'
+ '[' -n psmdb-operator ']'
+ kubectl_bin delete --grace-period=0 --force=true namespace pvc-resize-18358
+ rm -rf /tmp/tmp.Q9ZZgBE25d
+ kubectl_bin delete --grace-period=0 --force=true namespace psmdb-operator
++ mktemp
++ mktemp
+ desc 'test passed'
+ set +o xtrace
-----------------------------------------------------------------------------------
test passed
-----------------------------------------------------------------------------------
+ local LAST_OUT=/tmp/tmp.CEAdctoIhJ
+ local LAST_OUT=/tmp/tmp.crkftsYBV2
++ mktemp
++ mktemp
+ local LAST_ERR=/tmp/tmp.FPLuckmb36
+ local LAST_ERR=/tmp/tmp.DcyEEaTcdd
+ local exit_status=0
+ local timeout=4
+ local exit_status=0
+ local timeout=4
++ seq 0 2
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace pvc-resize-18358
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace psmdb-operator
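Note on the NotFound noise above: the cert-manager teardown fails only because the manifest was never applied in this cluster, so every object is already absent. The trace shows the same pattern each time: the delete is attempted three times, stdout and stderr are captured to temp files and printed after each attempt, and the final non-zero status is discarded by the caller. The snippet below is a minimal, self-contained sketch of that retry-and-capture pattern for readers who want to reproduce it outside the test suite; the function name retry_kubectl and the 0/4/8-second pauses are illustrative guesses taken from this trace, not the suite's actual kubectl_bin helper. Passing --ignore-not-found to kubectl delete would silence this noise entirely, at the cost of hiding genuine typos in resource names.

# Sketch only: illustrative retry wrapper, not the test suite's helper.
retry_kubectl() {
    local out err status i
    out=$(mktemp)
    err=$(mktemp)
    status=1
    for i in 0 1 2; do
        set +e
        kubectl "$@" >"$out" 2>"$err"
        status=$?
        set -e
        if [ "$status" -eq 0 ]; then
            break
        fi
        sleep $((i * 4))    # pauses observed in this trace: 0s, 4s, 8s
    done
    cat "$out"              # echo captured stdout, as the trace does
    cat "$err" >&2          # echo captured stderr
    rm -f "$out" "$err"
    return "$status"
}

# Example: the cleanup call that produced the NotFound output above;
# '|| true' mirrors the trace, where the failed return is ignored.
retry_kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml || true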