Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/logs/demand-backup-if-unhealthy.log
grep: warning: stray \ before -
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
+ create_infra demand-backup-if-unhealthy-227
+ local ns=demand-backup-if-unhealthy-227
+ [[ 1 == 1 ]]
+ delete_crd
+ desc 'get and delete old CRDs and RBAC'
+ set +o xtrace
-----------------------------------------------------------------------------------
get and delete old CRDs and RBAC
-----------------------------------------------------------------------------------
+ kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/deploy/crd.yaml --ignore-not-found --wait=false
++ mktemp
+ local LAST_OUT=/tmp/tmp.sHKji6HlDH
++ mktemp
+ local LAST_ERR=/tmp/tmp.tvUMWOGbqr
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/deploy/crd.yaml --ignore-not-found --wait=false
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.sHKji6HlDH
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted
+ cat /tmp/tmp.tvUMWOGbqr
+ rm /tmp/tmp.sHKji6HlDH /tmp/tmp.tvUMWOGbqr
+ return 0
++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/deploy/crd.yaml
++ grep -v '\-\-\-'
grep: warning: stray \ before -
grep: warning: stray \ before -
+ for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-')
+ kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
No resources found
+ kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: resource(s) were provided, but no name was specified
+ :
+ kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com
++ mktemp
+ local LAST_OUT=/tmp/tmp.C7u2MxGZHq
++ mktemp
+ local LAST_ERR=/tmp/tmp.NJue6laidB
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.C7u2MxGZHq
customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com condition met
+ cat /tmp/tmp.NJue6laidB
+ rm /tmp/tmp.C7u2MxGZHq /tmp/tmp.NJue6laidB
+ return 0
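The kubectl_bin scaffolding above (mktemp, LAST_OUT/LAST_ERR, seq 0 2, set +e/set -e) is a retry wrapper around kubectl. A minimal sketch of the pattern as the trace suggests it; the real helper lives in the suite's function library and may differ in details:

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 timeout=4
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    # three attempts, backing off 0s, 4s, 8s, matching the sleep 0/4/8 seen later in this log
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        [ "$exit_status" -eq 0 ] && break
        sleep $((timeout * i))
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR" >&2
    rm "$LAST_OUT" "$LAST_ERR"
    return $exit_status
}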
+ for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-')
+ kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
error: the server doesn't have a resource type "perconaservermongodbrestores"
+ kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbrestores"
+ :
+ kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com
++ mktemp
+ local LAST_OUT=/tmp/tmp.zX0ErIPASW
++ mktemp
+ local LAST_ERR=/tmp/tmp.DIfxNMyLj6
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.zX0ErIPASW
+ cat /tmp/tmp.DIfxNMyLj6
+ rm /tmp/tmp.zX0ErIPASW /tmp/tmp.DIfxNMyLj6
+ return 0
+ for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-')
+ kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
No resources found
+ kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: resource(s) were provided, but no name was specified
+ :
+ kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com
++ mktemp
+ local LAST_OUT=/tmp/tmp.0PHHp2ssBc
++ mktemp
+ local LAST_ERR=/tmp/tmp.8cnLHqqIj8
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.0PHHp2ssBc
customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com condition met
+ cat /tmp/tmp.8cnLHqqIj8
+ rm /tmp/tmp.0PHHp2ssBc /tmp/tmp.8cnLHqqIj8
+ return 0
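Each finalizer-stripping pass above pipes kubectl get into xargs -L 1 sh -xc '... -n $0 $1 ...'. When the get matches nothing ("No resources found" goes to stderr, so stdin is empty), GNU xargs still runs the command once with no arguments; sh -c then leaves $0 at its default, sh, which is why the bogus kubectl patch ... -n sh appears and fails with "no name was specified" before being swallowed by ':'. A possible fix, assuming GNU xargs, is -r (--no-run-if-empty):

# hypothetical rewrite of the finalizer-stripping pipeline; -r keeps xargs
# from running kubectl at all when the filtered list is empty
kubectl get "$crd_name" --all-namespaces -o wide \
    | grep -v NAMESPACE \
    | xargs -r -L 1 sh -xc 'kubectl patch '"$crd_name"' -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'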
+ local rbac_yaml=rbac.yaml
+ '[' -n psmdb-operator ']'
+ rbac_yaml=cw-rbac.yaml
+ kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/deploy/cw-rbac.yaml --ignore-not-found
++ mktemp
+ local LAST_OUT=/tmp/tmp.GWKdkDE0sA
++ mktemp
+ local LAST_ERR=/tmp/tmp.kdWECXejbO
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/deploy/cw-rbac.yaml --ignore-not-found
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.GWKdkDE0sA
clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted
clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted
+ cat /tmp/tmp.kdWECXejbO
+ rm /tmp/tmp.GWKdkDE0sA /tmp/tmp.kdWECXejbO
+ return 0
+ check_crd_for_deletion PR-2114-3092c1dd
+ local git_tag=PR-2114-3092c1dd
++ curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/PR-2114-3092c1dd/deploy/crd.yaml
++ yq eval .metadata.name
++ /usr/sbin/sed s/---//g
++ /usr/sbin/sed ':a;N;$!ba;s/\n/ /g'
+ for crd_name in $(curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml | yq eval '.metadata.name' | $sed 's/---//g' | $sed ':a;N;$!ba;s/\n/ /g')
++ kubectl_bin get crd/null -o 'jsonpath={.status.conditions[-1].type}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.j4necf3Ike
+++ mktemp
++ local LAST_ERR=/tmp/tmp.WTL3TSHOfq
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}'
++ exit_status=1
++ set -e
++ '[' 1 '!=' 0 -a -n 1 ']'
++ cat /tmp/tmp.j4necf3Ike
++ cat /tmp/tmp.WTL3TSHOfq
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found
++ sleep 0
++ for i in $(seq 0 2)
++ set +e
++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}'
++ exit_status=1
++ set -e
++ '[' 1 '!=' 0 -a -n 1 ']'
++ cat /tmp/tmp.j4necf3Ike
++ cat /tmp/tmp.WTL3TSHOfq
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found
++ sleep 4
++ for i in $(seq 0 2)
++ set +e
++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}'
++ exit_status=1
++ set -e
++ '[' 1 '!=' 0 -a -n 1 ']'
++ cat /tmp/tmp.j4necf3Ike
++ cat /tmp/tmp.WTL3TSHOfq
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found
++ sleep 8
++ cat /tmp/tmp.j4necf3Ike
++ cat /tmp/tmp.WTL3TSHOfq
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found
++ rm /tmp/tmp.j4necf3Ike /tmp/tmp.WTL3TSHOfq
++ return 1
+ [[ '' == \T\e\r\m\i\n\a\t\i\n\g ]]
+ '[' -n psmdb-operator ']'
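check_crd_for_deletion above ends up probing crd/null: the name list it builds (curl ... | yq eval '.metadata.name' | sed ...) emits the literal null for any fetched document that lacks .metadata.name, one plausible cause being the raw URL returning a 404 body, which yq parses as a plain scalar. The loop then retries the NotFound three times before giving up. A possible tightening, assuming yq v4, filters those documents out:

curl -s "https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml" \
    | yq eval 'select(.metadata.name != null) | .metadata.name'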
+ create_namespace psmdb-operator
+ local namespace=psmdb-operator
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ tail -n1
++ awk '-F ' '{print $2}'
++ sed s/NAMESPACE//
+ local chaos_mesh_ns=
+ desc 'destroy chaos-mesh'
+ set +o xtrace
-----------------------------------------------------------------------------------
destroy chaos-mesh
-----------------------------------------------------------------------------------
+ '[' -n '' ']'
++ kubectl get MutatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep validate-auth
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl api-resources
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get crd
++ grep chaos-mesh.org
++ awk '{print $1}'
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrole
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ kubectl_bin get ns
+ egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME'
+ '[' -n '' ']'
+ awk '{print$1}'
+ desc 'cleaned up old namespaces psmdb-operator'
+ set +o xtrace
++ mktemp
+ xargs kubectl delete ns
-----------------------------------------------------------------------------------
cleaned up old namespaces psmdb-operator
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace psmdb-operator --ignore-not-found
++ mktemp
+ local LAST_OUT=/tmp/tmp.ywTGzOWylT
egrep: warning: egrep is obsolescent; using grep -E
++ mktemp
+ local LAST_OUT=/tmp/tmp.wmeTLr8ZcA
++ mktemp
+ local LAST_ERR=/tmp/tmp.MfSKAhIoSL
+ local exit_status=0
+ local timeout=4
+ local LAST_ERR=/tmp/tmp.CGV16ka8ap
+ local exit_status=0
+ local timeout=4
++ seq 0 2
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl get ns
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete namespace psmdb-operator --ignore-not-found
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.ywTGzOWylT
+ cat /tmp/tmp.MfSKAhIoSL
+ rm /tmp/tmp.ywTGzOWylT /tmp/tmp.MfSKAhIoSL
+ return 0
namespace "demand-backup-if-unhealthy-9170" deleted
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.wmeTLr8ZcA
namespace "psmdb-operator" deleted
+ cat /tmp/tmp.CGV16ka8ap
+ rm /tmp/tmp.wmeTLr8ZcA /tmp/tmp.CGV16ka8ap
+ return 0
+ kubectl_bin wait --for=delete namespace psmdb-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.yG2p8d1vt7
++ mktemp
+ local LAST_ERR=/tmp/tmp.cyrHgwJLn4
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl wait --for=delete namespace psmdb-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.yG2p8d1vt7
+ cat /tmp/tmp.cyrHgwJLn4
+ rm /tmp/tmp.yG2p8d1vt7 /tmp/tmp.cyrHgwJLn4
+ return 0
+ desc 'create namespace psmdb-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace psmdb-operator
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace psmdb-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.fzu0eHDFCA
++ mktemp
+ local LAST_ERR=/tmp/tmp.79FLJxOyHX
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl create namespace psmdb-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.fzu0eHDFCA
namespace/psmdb-operator created
+ cat /tmp/tmp.79FLJxOyHX
+ rm /tmp/tmp.fzu0eHDFCA /tmp/tmp.79FLJxOyHX
+ return 0
+ set_kube_ctx psmdb-operator
+ local namespace=psmdb-operator
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.bDuiW0oc3U
+++ mktemp
++ local LAST_ERR=/tmp/tmp.acgOXcqYxA
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.bDuiW0oc3U
++ cat /tmp/tmp.acgOXcqYxA
++ rm /tmp/tmp.bDuiW0oc3U /tmp/tmp.acgOXcqYxA
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2114-3092c1dd-20-cluster6 --namespace=psmdb-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.gSbEaEc5el
++ mktemp
+ local LAST_ERR=/tmp/tmp.FONQm7nw5I
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2114-3092c1dd-20-cluster6 --namespace=psmdb-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.gSbEaEc5el
Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-2114-3092c1dd-20-cluster6" modified.
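destroy_chaos_mesh and the namespace sweep above repeatedly call kubectl delete with an empty name list, so every pass errors with "resource(s) were provided, but no name was specified" and is masked by ':'; GNU grep also flags the egrep calls as obsolescent. A hedged variant of the same pipelines, assuming GNU xargs, avoids both:

# grep -E replaces the obsolescent egrep; xargs -r skips the delete
# entirely when the filtered list is empty, so nothing needs masking
kubectl get clusterrole | grep chaos-mesh | awk '{print $1}' \
    | xargs -r timeout 30 kubectl delete clusterrole
kubectl get ns \
    | grep -Ev '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME' \
    | awk '{print $1}' | xargs -r kubectl delete ns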
+ cat /tmp/tmp.FONQm7nw5I
+ rm /tmp/tmp.gSbEaEc5el /tmp/tmp.FONQm7nw5I
+ return 0
+ deploy_operator
+ desc 'start PSMDB operator: perconalab/percona-server-mongodb-operator:PR-2114-3092c1dd'
+ set +o xtrace
-----------------------------------------------------------------------------------
start PSMDB operator: perconalab/percona-server-mongodb-operator:PR-2114-3092c1dd
-----------------------------------------------------------------------------------
+ local cr_file
+ '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/demand-backup-if-unhealthy/conf/crd.yaml ']'
+ cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/deploy/crd.yaml
+ kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/deploy/crd.yaml
++ mktemp
+ local LAST_OUT=/tmp/tmp.WjLkGCAmBL
++ mktemp
+ local LAST_ERR=/tmp/tmp.y4z8ekMSC9
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/deploy/crd.yaml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.WjLkGCAmBL
customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied
+ cat /tmp/tmp.y4z8ekMSC9
+ rm /tmp/tmp.WjLkGCAmBL /tmp/tmp.y4z8ekMSC9
+ return 0
+ '[' -n psmdb-operator ']'
+ apply_rbac cw-rbac
+ local operator_namespace=psmdb-operator
+ local rbac=cw-rbac
+ cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/deploy/cw-rbac.yaml
+ sed -e 's^namespace: .*^namespace: psmdb-operator^'
+ kubectl_bin apply -n psmdb-operator -f -
++ mktemp
+ local LAST_OUT=/tmp/tmp.NmuG7XQIV6
++ mktemp
+ local LAST_ERR=/tmp/tmp.kL3shPwEFa
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply -n psmdb-operator -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.NmuG7XQIV6
clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created
serviceaccount/percona-server-mongodb-operator created
clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created
+ cat /tmp/tmp.kL3shPwEFa
+ rm /tmp/tmp.NmuG7XQIV6 /tmp/tmp.kL3shPwEFa
+ return 0
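The CRDs above are applied with --server-side --force-conflicts. Server-side apply tracks field ownership on the API server and skips the client-side last-applied-configuration annotation, which CRDs as large as these can overflow; --force-conflicts takes over fields claimed by an earlier field manager. The --field-manager flag below is optional and shown only for illustration:

kubectl apply --server-side --force-conflicts \
    --field-manager=e2e-tests -f deploy/crd.yaml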
+ yq eval ' (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-2114-3092c1dd") | ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") | ((.. | select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/deploy/cw-operator.yaml
+ kubectl_bin apply -n psmdb-operator -f -
++ mktemp
+ local LAST_OUT=/tmp/tmp.rYloGAKQWF
++ mktemp
+ local LAST_ERR=/tmp/tmp.PtJbZkYK6E
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply -n psmdb-operator -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.rYloGAKQWF
deployment.apps/percona-server-mongodb-operator created
+ cat /tmp/tmp.PtJbZkYK6E
+ rm /tmp/tmp.rYloGAKQWF /tmp/tmp.PtJbZkYK6E
+ return 0
+ sleep 20
++ get_operator_pod
++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator
+++ mktemp
++ local LAST_OUT=/tmp/tmp.MDieimhVob
+++ mktemp
++ local LAST_ERR=/tmp/tmp.Vs3UHUk0gh
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.MDieimhVob
++ cat /tmp/tmp.Vs3UHUk0gh
++ rm /tmp/tmp.MDieimhVob /tmp/tmp.Vs3UHUk0gh
++ return 0
+ wait_operator_pod percona-server-mongodb-operator-54db78fc9c-cct42
+ local pod=percona-server-mongodb-operator-54db78fc9c-cct42
+ set +o xtrace
waiting for pod/percona-server-mongodb-operator-54db78fc9c-cct42 to be ready.OK
+ echo 'Print operator info from log'
Print operator info from log
+ grep 'Manager starting up'
++ get_operator_pod
++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator
+++ mktemp
++ local LAST_OUT=/tmp/tmp.ZA1OXL2NXM
+++ mktemp
++ local LAST_ERR=/tmp/tmp.sWCZk1nUdG
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.ZA1OXL2NXM
++ cat /tmp/tmp.sWCZk1nUdG
++ rm /tmp/tmp.ZA1OXL2NXM /tmp/tmp.sWCZk1nUdG
++ return 0
+ kubectl_bin logs -n psmdb-operator percona-server-mongodb-operator-54db78fc9c-cct42
++ mktemp
+ local LAST_OUT=/tmp/tmp.e4CyXgoVD8
++ mktemp
+ local LAST_ERR=/tmp/tmp.F3hdnAQSIG
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl logs -n psmdb-operator percona-server-mongodb-operator-54db78fc9c-cct42
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.e4CyXgoVD8
+ cat /tmp/tmp.F3hdnAQSIG
+ rm /tmp/tmp.e4CyXgoVD8 /tmp/tmp.F3hdnAQSIG
+ return 0
2025-12-09T22:06:22.128Z INFO setup Manager starting up {"gitCommit": "3092c1ddb5afb6fa964fa527b9d03ff5bc63013a", "gitBranch": "PR-2114-3092c1dd", "buildTime": "", "goVersion": "go1.25.5", "os": "linux", "arch": "amd64"}
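The yq expression at the top of this block pins the operator image, then walks the whole document (..) for any node one of whose children equals the literal DISABLE_TELEMETRY or LOG_LEVEL and updates that node's sibling .value. A more explicit equivalent, assuming yq v4 and that both env vars live on the first container, would be:

yq eval '
  .spec.template.spec.containers[0].image = "perconalab/percona-server-mongodb-operator:PR-2114-3092c1dd" |
  (.spec.template.spec.containers[0].env[] | select(.name == "DISABLE_TELEMETRY")).value = "true" |
  (.spec.template.spec.containers[0].env[] | select(.name == "LOG_LEVEL")).value = "DEBUG"
' deploy/cw-operator.yaml | kubectl apply -n psmdb-operator -f -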
+ create_namespace demand-backup-if-unhealthy-227
+ local namespace=demand-backup-if-unhealthy-227
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ tail -n1
++ awk '-F ' '{print $2}'
++ sed s/NAMESPACE//
+ local chaos_mesh_ns=
+ desc 'destroy chaos-mesh'
+ set +o xtrace
-----------------------------------------------------------------------------------
destroy chaos-mesh
-----------------------------------------------------------------------------------
+ '[' -n '' ']'
++ kubectl get MutatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep validate-auth
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl api-resources
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get crd
++ grep chaos-mesh.org
++ awk '{print $1}'
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrole
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ kubectl_bin get ns
+ egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME'
+ awk '{print$1}'
+ '[' -n '' ']'
+ xargs kubectl delete ns
+ desc 'cleaned up old namespaces demand-backup-if-unhealthy-227'
+ set +o xtrace
-----------------------------------------------------------------------------------
++ mktemp
cleaned up old namespaces demand-backup-if-unhealthy-227
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace demand-backup-if-unhealthy-227 --ignore-not-found
egrep: warning: egrep is obsolescent; using grep -E
++ mktemp
+ local LAST_OUT=/tmp/tmp.dQnSpYNldU
++ mktemp
+ local LAST_OUT=/tmp/tmp.JAiy6qCcXr
++ mktemp
+ local LAST_ERR=/tmp/tmp.95zjAM6LR9
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ local LAST_ERR=/tmp/tmp.Mqhj4FyexN
+ local exit_status=0
+ local timeout=4
+ for i in $(seq 0 2)
+ set +e
+ kubectl get ns
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete namespace demand-backup-if-unhealthy-227 --ignore-not-found
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.dQnSpYNldU
+ cat /tmp/tmp.95zjAM6LR9
+ rm /tmp/tmp.dQnSpYNldU /tmp/tmp.95zjAM6LR9
+ return 0
error: resource(s) were provided, but no name was specified
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.JAiy6qCcXr
+ cat /tmp/tmp.Mqhj4FyexN
+ rm /tmp/tmp.JAiy6qCcXr /tmp/tmp.Mqhj4FyexN
+ return 0
+ kubectl_bin wait --for=delete namespace demand-backup-if-unhealthy-227
++ mktemp
+ local LAST_OUT=/tmp/tmp.32ZZrcopnt
++ mktemp
+ local LAST_ERR=/tmp/tmp.TfVZi0yQWn
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl wait --for=delete namespace demand-backup-if-unhealthy-227
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.32ZZrcopnt
+ cat /tmp/tmp.TfVZi0yQWn
+ rm /tmp/tmp.32ZZrcopnt /tmp/tmp.TfVZi0yQWn
+ return 0
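create_namespace plus set_kube_ctx implement a recycle-and-retarget pattern: delete the namespace, block until it is fully gone, recreate it, and point the current kube context at it. A condensed, hypothetical form of the same steps:

recycle_namespace() {
    local ns=$1
    kubectl delete namespace "$ns" --ignore-not-found
    kubectl wait --for=delete namespace "$ns" --timeout=120s
    kubectl create namespace "$ns"
    kubectl config set-context "$(kubectl config current-context)" --namespace="$ns"
}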
+ desc 'create namespace demand-backup-if-unhealthy-227'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace demand-backup-if-unhealthy-227
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace demand-backup-if-unhealthy-227
++ mktemp
+ local LAST_OUT=/tmp/tmp.WUhQLOKThK
++ mktemp
+ local LAST_ERR=/tmp/tmp.M7xyswkXF1
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl create namespace demand-backup-if-unhealthy-227
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.WUhQLOKThK
namespace/demand-backup-if-unhealthy-227 created
+ cat /tmp/tmp.M7xyswkXF1
+ rm /tmp/tmp.WUhQLOKThK /tmp/tmp.M7xyswkXF1
+ return 0
+ set_kube_ctx demand-backup-if-unhealthy-227
+ local namespace=demand-backup-if-unhealthy-227
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.vLtOc7iDiY
+++ mktemp
++ local LAST_ERR=/tmp/tmp.hBz9nREjUq
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.vLtOc7iDiY
++ cat /tmp/tmp.hBz9nREjUq
++ rm /tmp/tmp.vLtOc7iDiY /tmp/tmp.hBz9nREjUq
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2114-3092c1dd-20-cluster6 --namespace=demand-backup-if-unhealthy-227
++ mktemp
+ local LAST_OUT=/tmp/tmp.6tRxsh65Fz
++ mktemp
+ local LAST_ERR=/tmp/tmp.sYtjhzk3wm
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2114-3092c1dd-20-cluster6 --namespace=demand-backup-if-unhealthy-227
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.6tRxsh65Fz
Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-2114-3092c1dd-20-cluster6" modified.
+ cat /tmp/tmp.sYtjhzk3wm
+ rm /tmp/tmp.6tRxsh65Fz /tmp/tmp.sYtjhzk3wm
+ return 0
+ deploy_minio
+ desc 'install Minio'
+ set +o xtrace
-----------------------------------------------------------------------------------
install Minio
-----------------------------------------------------------------------------------
+ helm uninstall minio-service
Error: uninstall: Release not loaded: minio-service: release: not found
+ :
+ helm repo remove minio
"minio" has been removed from your repositories
+ helm repo add minio https://charts.min.io/
"minio" has been added to your repositories
+ retry 10 60 helm install minio-service --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio
+ local max=10
+ local delay=60
+ shift 2
+ local n=1
+ helm install minio-service --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio
NAME: minio-service
LAST DEPLOYED: Tue Dec 9 22:06:53 2025
NAMESPACE: demand-backup-if-unhealthy-227
STATUS: deployed
REVISION: 1
TEST SUITE: None
NOTES:
MinIO can be accessed via port 9000 on the following DNS name from within your cluster:
minio-service.demand-backup-if-unhealthy-227.cluster.local
To access MinIO from localhost, run the below commands:
1. export POD_NAME=$(kubectl get pods --namespace demand-backup-if-unhealthy-227 -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}")
2. kubectl port-forward $POD_NAME 9000 --namespace demand-backup-if-unhealthy-227
Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/
You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client:
1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart
2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace demand-backup-if-unhealthy-227 minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace demand-backup-if-unhealthy-227 minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000
3. mc ls minio-service-local
++ kubectl_bin get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.s8QXshZAhQ
+++ mktemp
++ local LAST_ERR=/tmp/tmp.ZEPY14GYmx
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.s8QXshZAhQ
++ cat /tmp/tmp.ZEPY14GYmx
++ rm /tmp/tmp.s8QXshZAhQ /tmp/tmp.ZEPY14GYmx
++ return 0
+ MINIO_POD=minio-service-d9589b474-pxgnf
+ wait_pod minio-service-d9589b474-pxgnf
+ local pod=minio-service-d9589b474-pxgnf
+ set +o xtrace
waiting for pod/minio-service-d9589b474-pxgnf to be ready.OK
+ '[' -n psmdb-operator ']'
+ kubectl_bin create svc -n psmdb-operator externalname minio-service --external-name=minio-service.demand-backup-if-unhealthy-227.svc.cluster.local --tcp=9000
++ mktemp
+ local LAST_OUT=/tmp/tmp.cV9x6O27NO
++ mktemp
+ local LAST_ERR=/tmp/tmp.JaqY4ekc9d
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl create svc -n psmdb-operator externalname minio-service --external-name=minio-service.demand-backup-if-unhealthy-227.svc.cluster.local --tcp=9000
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.cV9x6O27NO
service/minio-service created
+ cat /tmp/tmp.JaqY4ekc9d
+ rm /tmp/tmp.cV9x6O27NO /tmp/tmp.JaqY4ekc9d
+ return 0
+ create_minio_bucket operator-testing
+ local bucket=operator-testing
+ kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing'
++ mktemp
+ local LAST_OUT=/tmp/tmp.dyuS5RQGmv
++ mktemp
+ local LAST_ERR=/tmp/tmp.e6Y38BtAZy
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing'
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.dyuS5RQGmv
make_bucket: operator-testing
pod "aws-cli" deleted from demand-backup-if-unhealthy-227 namespace
+ cat /tmp/tmp.e6Y38BtAZy
+ rm /tmp/tmp.dyuS5RQGmv /tmp/tmp.e6Y38BtAZy
+ return 0
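The retry 10 60 helm install ... call above exposes a few of the helper's locals in the trace (max=10, delay=60, n=1) before the first attempt succeeds. A plausible reconstruction of retry from those traces; the loop body is an assumption:

retry() {
    local max=$1
    local delay=$2
    shift 2
    local n=1
    until "$@"; do
        if [ "$n" -ge "$max" ]; then
            echo "retry: failed after $n attempts" >&2
            return 1
        fi
        sleep "$delay"
        n=$((n + 1))
    done
}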
+ create_secrets
+ echo 'Creating secrets and start client'
Creating secrets and start client
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/demand-backup-if-unhealthy/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/conf/client.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.bRJEdDKzn0
++ mktemp
+ local LAST_ERR=/tmp/tmp.iEOCGlfWx9
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/demand-backup-if-unhealthy/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/conf/client.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.bRJEdDKzn0
secret/some-users created
deployment.apps/psmdb-client created
+ cat /tmp/tmp.iEOCGlfWx9
+ rm /tmp/tmp.bRJEdDKzn0 /tmp/tmp.iEOCGlfWx9
+ return 0
+ apply_s3_storage_secrets
+ desc 'create secrets for cloud storages'
+ set +o xtrace
-----------------------------------------------------------------------------------
create secrets for cloud storages
-----------------------------------------------------------------------------------
+ '[' -z '' ']'
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/conf/cloud-secret.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.JXJFASb76C
++ mktemp
+ local LAST_ERR=/tmp/tmp.37yua8LO6f
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/conf/cloud-secret.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.JXJFASb76C
secret/minio-secret created
secret/aws-s3-secret created
secret/gcp-cs-secret created
secret/azure-secret created
secret/gcp-cs-sa-key-secret created
+ cat /tmp/tmp.37yua8LO6f
+ rm /tmp/tmp.JXJFASb76C /tmp/tmp.37yua8LO6f
+ return 0
+ cluster=some-name
+ desc 'create PSMDB cluster some-name'
+ set +o xtrace
-----------------------------------------------------------------------------------
create PSMDB cluster some-name
-----------------------------------------------------------------------------------
+ apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/demand-backup-if-unhealthy/conf/some-name.yml
+ '[' -z '' ']'
+ cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/demand-backup-if-unhealthy/conf/some-name.yml
+ cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/demand-backup-if-unhealthy/conf/some-name.yml
+ kubectl_bin apply -f -
+ yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod8.0"'
+ yq eval '(.spec | select(has("pmm"))).pmm.image = "percona/pmm-client:2.44.1-1"'
+ yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-2114-3092c1dd"'
++ mktemp
+ yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"'
+ yq eval '.spec.upgradeOptions.apply="Never"'
+ local LAST_OUT=/tmp/tmp.HtCpKfZGda
++ mktemp
+ local LAST_ERR=/tmp/tmp.zxEE8Vtlzp
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.HtCpKfZGda
perconaservermongodb.psmdb.percona.com/some-name created
+ cat /tmp/tmp.zxEE8Vtlzp
+ rm /tmp/tmp.HtCpKfZGda /tmp/tmp.zxEE8Vtlzp
+ return 0
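apply_cluster pipes the CR through a chain of yq evals whose select() guards make each override conditional: the image is set only when the CR does not already pin one, and the pmm/initImage/backup images only when those sections exist. The same wiring as a single hypothetical expression, assuming yq v4:

yq eval '
  (.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod8.0" |
  (.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup" |
  .spec.upgradeOptions.apply = "Never"
' conf/some-name.yml | kubectl apply -f -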
+ wait_for_cluster_state some-name ready
+ local cluster_name=some-name
+ local target_state=ready
+ echo -n 'Waiting for psmdb/some-name to reach ready state'
Waiting for psmdb/some-name to reach ready state+ local timeout=0
++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.a5SIt97sKb
+++ mktemp
++ local LAST_ERR=/tmp/tmp.zhPd8jYtOV
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.a5SIt97sKb
++ cat /tmp/tmp.zhPd8jYtOV
++ rm /tmp/tmp.a5SIt97sKb /tmp/tmp.zhPd8jYtOV
++ return 0
+ [[ '' =~ ready ]]
+ sleep 1
+ timeout=1
+ echo -n .
.+ [[ 1 -gt 1500 ]]
[the probe above repeats once per second, each pass allocating a fresh pair of mktemp files; psmdb/some-name reports "initializing" on every check]
+ [[ initializing =~ ready ]]
+ sleep 1
+ timeout=53
+ echo -n .
[log ends here, mid-loop, at timeout=53 of the 1500 iterations the loop allows]
.+ [[ 53 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cmzZ35eJ3W +++ mktemp ++ local LAST_ERR=/tmp/tmp.ckbHQXuvlM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.cmzZ35eJ3W ++ cat /tmp/tmp.ckbHQXuvlM ++ rm /tmp/tmp.cmzZ35eJ3W /tmp/tmp.ckbHQXuvlM ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=54 + echo -n . .+ [[ 54 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xz5FT4rCjw +++ mktemp ++ local LAST_ERR=/tmp/tmp.ToJvLGShdZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xz5FT4rCjw ++ cat /tmp/tmp.ToJvLGShdZ ++ rm /tmp/tmp.xz5FT4rCjw /tmp/tmp.ToJvLGShdZ ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=55 + echo -n . .+ [[ 55 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4AF4uAKKFC +++ mktemp ++ local LAST_ERR=/tmp/tmp.3nQkGeruDH ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.4AF4uAKKFC ++ cat /tmp/tmp.3nQkGeruDH ++ rm /tmp/tmp.4AF4uAKKFC /tmp/tmp.3nQkGeruDH ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=56 + echo -n . .+ [[ 56 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BwB72rMrkf +++ mktemp ++ local LAST_ERR=/tmp/tmp.rU2bRtYjMM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.BwB72rMrkf ++ cat /tmp/tmp.rU2bRtYjMM ++ rm /tmp/tmp.BwB72rMrkf /tmp/tmp.rU2bRtYjMM ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=57 + echo -n . .+ [[ 57 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Q520wofHmi +++ mktemp ++ local LAST_ERR=/tmp/tmp.obB9651Ew0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Q520wofHmi ++ cat /tmp/tmp.obB9651Ew0 ++ rm /tmp/tmp.Q520wofHmi /tmp/tmp.obB9651Ew0 ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=58 + echo -n . .+ [[ 58 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RI0sAe28QW +++ mktemp ++ local LAST_ERR=/tmp/tmp.Im3pt5LCAv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.RI0sAe28QW ++ cat /tmp/tmp.Im3pt5LCAv ++ rm /tmp/tmp.RI0sAe28QW /tmp/tmp.Im3pt5LCAv ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=59 + echo -n . 
.+ [[ 59 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LOFiLrJgnw +++ mktemp ++ local LAST_ERR=/tmp/tmp.87USbyjt3O ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LOFiLrJgnw ++ cat /tmp/tmp.87USbyjt3O ++ rm /tmp/tmp.LOFiLrJgnw /tmp/tmp.87USbyjt3O ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=60 + echo -n . .+ [[ 60 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iao3V3X1OH +++ mktemp ++ local LAST_ERR=/tmp/tmp.F89rOxcvn6 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.iao3V3X1OH ++ cat /tmp/tmp.F89rOxcvn6 ++ rm /tmp/tmp.iao3V3X1OH /tmp/tmp.F89rOxcvn6 ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=61 + echo -n . .+ [[ 61 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tGoR4p9CVo +++ mktemp ++ local LAST_ERR=/tmp/tmp.W1TfwGaChR ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.tGoR4p9CVo ++ cat /tmp/tmp.W1TfwGaChR ++ rm /tmp/tmp.tGoR4p9CVo /tmp/tmp.W1TfwGaChR ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=62 + echo -n . .+ [[ 62 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NgSor8PtVM +++ mktemp ++ local LAST_ERR=/tmp/tmp.V5VUf26NY9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.NgSor8PtVM ++ cat /tmp/tmp.V5VUf26NY9 ++ rm /tmp/tmp.NgSor8PtVM /tmp/tmp.V5VUf26NY9 ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=63 + echo -n . .+ [[ 63 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Y1KaOARqdH +++ mktemp ++ local LAST_ERR=/tmp/tmp.xIcMPmrjUe ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Y1KaOARqdH ++ cat /tmp/tmp.xIcMPmrjUe ++ rm /tmp/tmp.Y1KaOARqdH /tmp/tmp.xIcMPmrjUe ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=64 + echo -n . .+ [[ 64 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MmKS7tQesp +++ mktemp ++ local LAST_ERR=/tmp/tmp.E2mfPRoa40 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.MmKS7tQesp ++ cat /tmp/tmp.E2mfPRoa40 ++ rm /tmp/tmp.MmKS7tQesp /tmp/tmp.E2mfPRoa40 ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=65 + echo -n . 
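The dotted progress output above comes from the suite's poll-and-sleep wait. The helper's name, wait_for_cluster_state, appears later in this trace; its body does not, so the following is a sketch reconstructed from the traced behavior (one-second interval, 1500-second cap), not the suite's actual source:

    # Poll the PSMDB resource until .status.state matches the wanted pattern.
    wait_for_cluster_state() {
        local cluster=$1 target=$2 timeout=0 state
        until state=$(kubectl get psmdb "$cluster" -o 'jsonpath={.status.state}') \
            && [[ $state =~ $target ]]; do
            if [[ $timeout -gt 1500 ]]; then
                echo "timeout: psmdb/$cluster is still ${state:-unknown}" >&2
                return 1
            fi
            sleep 1
            timeout=$((timeout + 1))
            echo -n .
        done
        echo "psmdb/$cluster is $target: OK"
    }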
+ desc 'Adding new user' + set +o xtrace ----------------------------------------------------------------------------------- Adding new user ----------------------------------------------------------------------------------- + run_mongo 'db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' userAdmin:userAdmin123456@some-name-rs0.demand-backup-if-unhealthy-227 + local 'command=db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' + local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-if-unhealthy-227 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' [...] ++ return 0 + local client_container=psmdb-client-696897d69b-96npl + local mongo_flag= + [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-if-unhealthy-227 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-96npl -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-if-unhealthy-227.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
[...] + kubectl exec psmdb-client-696897d69b-96npl -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-if-unhealthy-227.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.e5WbVkPlf5 Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-if-unhealthy-227.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-if-unhealthy-227.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-if-unhealthy-227.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("0340551a-fba5-4d3a-afe5-7774d45aad0a") } Percona Server for MongoDB server version: v8.0.16-5 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] } bye + cat /tmp/tmp.6NYc6HckL5 + rm /tmp/tmp.e5WbVkPlf5 /tmp/tmp.6NYc6HckL5 + return 0 + echo -n 'Verifying user creation...' Verifying user creation...+ until_with_timeout 10 check_user + local timeout=10 + shift + local elapsed=0 + check_user + run_mongo 'db.getUser("myApp")' userAdmin:userAdmin123456@some-name-rs0.demand-backup-if-unhealthy-227 + grep -q '"user" : "myApp"' [...] + kubectl_bin exec psmdb-client-696897d69b-96npl -- bash -c 'printf '\''db.getUser("myApp")\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-if-unhealthy-227.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' [...] + return 0 + return 0 + desc 'Testing logical backup with an unhealthy container' + set +o xtrace
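Stripped of kubectl_bin's retry plumbing, the create-and-verify sequence above reduces to two execs into the psmdb-client pod. A minimal re-creation, assuming the same pod selector, credentials, and namespace shown in the trace:

    client=$(kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}')
    uri='mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-if-unhealthy-227.svc.cluster.local/admin?ssl=false&replicaSet=rs0'

    # create the application user
    kubectl exec "$client" -- bash -c \
        "printf 'db.createUser({user:\"myApp\",pwd:\"myPass\",roles:[{db:\"myApp\",role:\"readWrite\"}]})\n' | mongo \"$uri\""

    # verify it exists before the test proceeds
    kubectl exec "$client" -- bash -c "printf 'db.getUser(\"myApp\")\n' | mongo \"$uri\"" \
        | grep -q '"user" : "myApp"'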
----------------------------------------------------------------------------------- Testing logical backup with an unhealthy container ----------------------------------------------------------------------------------- + type=logical + write_data 'This is some data for backup if unhealthy' + local 'data=This is some data for backup if unhealthy' + local find_prefix= + run_mongo 'use myApp\n db.test.insert({ x: "This is some data for backup if unhealthy" })' myApp:myPass@some-name-rs0.demand-backup-if-unhealthy-227 + local 'command=use myApp\n db.test.insert({ x: "This is some data for backup if unhealthy" })' + local uri=myApp:myPass@some-name-rs0.demand-backup-if-unhealthy-227 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' [...] ++ return 0 + local client_container=psmdb-client-696897d69b-96npl + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-if-unhealthy-227 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-96npl -- bash -c 'printf '\''use myApp\n db.test.insert({ x: "This is some data for backup if unhealthy" })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-if-unhealthy-227.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' [...] + cat /tmp/tmp.AeanIQL80L Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-if-unhealthy-227.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-if-unhealthy-227.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-if-unhealthy-227.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("ec0ebab1-49d6-420b-b6be-9f3c370c783e") } Percona Server for MongoDB server version: v8.0.16-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.8TXa7s7VEc + rm /tmp/tmp.AeanIQL80L /tmp/tmp.8TXa7s7VEc + return 0
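compare_mongo_cmd, traced next, reads the collection back, filters the shell's connection chatter, masks unstable values (ObjectIds, namespace suffixes), and diffs the result against a checked-in expectation. A sketch of that normalization, with the filter patterns copied from the trace; grep -E is used directly, which also sidesteps the obsolescent-egrep warning visible below:

    uri='mongodb://myApp:myPass@some-name-rs0.demand-backup-if-unhealthy-227.svc.cluster.local/admin?ssl=false&replicaSet=rs0'
    kubectl exec "$client" -- bash -c "printf 'use myApp\n db.test.find()\n' | mongo \"$uri\"" \
        | grep -Ev 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Started a new thread for the timer service' \
        | sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' \
        > /tmp/find
    diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/demand-backup-if-unhealthy/compare/find.json /tmp/find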
+ compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-if-unhealthy-227 '' .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-if-unhealthy-227 + local postfix= + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-09T22:09:55+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-if-unhealthy-227 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-if-unhealthy-227 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' [...] egrep: warning: egrep is obsolescent; using grep -E ++ return 0 + local client_container=psmdb-client-696897d69b-96npl + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-if-unhealthy-227 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-96npl -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-if-unhealthy-227.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' [...] + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/demand-backup-if-unhealthy/compare/find.json /tmp/tmp.f9KmrCeeh7/find + wait_sts_to_become_not_ready + local sts_name=some-name-rs0 + local timeout=60 + local pod_name=some-name-rs0-1 + local interval=2 + local elapsed=0 + echo 'Updating cluster with invalid image...' Updating cluster with invalid image... + update_with_invalid_db_image + local cluster_name=some-name + yq /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/demand-backup-if-unhealthy/conf/some-name.yml + yq '.spec.image="invalid-image:invalid-tag"' + kubectl_bin apply -f - [...] + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Z1zQtuo037 perconaservermongodb.psmdb.percona.com/some-name configured + cat /tmp/tmp.YDcuDDnmQC + rm /tmp/tmp.Z1zQtuo037 /tmp/tmp.YDcuDDnmQC + return 0
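update_with_invalid_db_image is what drives the statefulset out of its ready state: it rewrites .spec.image to a tag that cannot be pulled and re-applies the manifest. Condensed from the trace (yq v4 syntax):

    yq '.spec.image = "invalid-image:invalid-tag"' \
        /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/demand-backup-if-unhealthy/conf/some-name.yml \
        | kubectl apply -f -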
+ echo -n 'Wait for statefulset some-name-rs0 to become not ready...'
Wait for statefulset some-name-rs0 to become not ready...+ is_sts_ready some-name-rs0 + local sts_name=some-name-rs0 + local replicas ready_replicas + kubectl_bin get sts some-name-rs0 ++ kubectl_bin get sts some-name-rs0 -o 'jsonpath={.spec.replicas}' [...] ++ return 0 + replicas=3 ++ kubectl_bin get sts some-name-rs0 -o 'jsonpath={.status.readyReplicas}' [...] ++ return 0 + ready_replicas=3 + [[ 3 -eq 3 ]] + (( elapsed >= timeout )) + sleep 2 + (( elapsed += interval )) + echo -n .
[... is_sts_ready repeats on a two-second interval while readyReplicas stays at 3 ...]
.+ is_sts_ready some-name-rs0 + local sts_name=some-name-rs0 + local replicas ready_replicas + kubectl_bin get sts some-name-rs0 ++ kubectl_bin get sts some-name-rs0 -o 'jsonpath={.spec.replicas}' [...] ++ return 0 + replicas=3 ++ kubectl_bin get sts some-name-rs0 -o 'jsonpath={.status.readyReplicas}' [...] ++ return 0 + ready_replicas=2 + [[ 3 -eq 2 ]]
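is_sts_ready, whose iterations are traced above, boils down to two jsonpath reads. A sketch matching the traced behavior; note that the API server omits .status.readyReplicas entirely when no replica is ready, hence the default:

    is_sts_ready() {
        local sts=$1 replicas ready
        replicas=$(kubectl get sts "$sts" -o 'jsonpath={.spec.replicas}')
        ready=$(kubectl get sts "$sts" -o 'jsonpath={.status.readyReplicas}')
        # ready only when every desired replica reports ready
        [[ $replicas -eq ${ready:-0} ]]
    }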
+ wait_for_cluster_state some-name 'initializing|error' + local cluster_name=some-name + local 'target_state=initializing|error' + echo -n 'Waiting for psmdb/some-name to reach initializing|error state' Waiting for psmdb/some-name to reach initializing|error state+ local timeout=0 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' [...] ++ return 0 + [[ initializing =~ initializing|error ]] + echo + log 'psmdb/some-name is initializing|error: OK' + set +o xtrace [2025-12-09T22:10:23+0000] psmdb/some-name is initializing|error: OK + backup_name_minio=backup-minio-logical + run_backup minio backup-minio-logical logical + local storage=minio + local backup_name=backup-minio-logical + local type=logical + desc 'run backup backup-minio-logical' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio-logical ----------------------------------------------------------------------------------- + yq eval '.metadata.name = "backup-minio-logical" | .spec.storageName = "minio" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/demand-backup-if-unhealthy/conf/backup-minio.yml + kubectl_bin apply -f - [...] + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.i59ILiJz7n perconaservermongodbbackup.psmdb.percona.com/backup-minio-logical created + cat /tmp/tmp.x9OepGBoTK + rm /tmp/tmp.i59ILiJz7n /tmp/tmp.x9OepGBoTK + return 0
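Note that the backup is requested while the cluster is deliberately unhealthy. The PerconaServerMongoDBBackup resource is rendered from a template, with everything specific to this backup injected by yq; taken directly from the trace:

    yq eval '.metadata.name = "backup-minio-logical"
        | .spec.storageName = "minio"
        | .spec.type = "logical"' \
        /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/demand-backup-if-unhealthy/conf/backup-minio.yml \
        | kubectl apply -f -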
+ verify_sts_not_ready + local sts_name=some-name-rs0 + is_sts_ready some-name-rs0 + local sts_name=some-name-rs0 + local replicas ready_replicas + kubectl_bin get sts some-name-rs0 ++ kubectl_bin get sts some-name-rs0 -o 'jsonpath={.spec.replicas}' [...] ++ return 0 + replicas=3 ++ kubectl_bin get sts some-name-rs0 -o 'jsonpath={.status.readyReplicas}' [...] ++ return 0 + ready_replicas=2 + [[ 3 -eq 2 ]] + wait_for_cluster_state some-name 'initializing|error' + local cluster_name=some-name + local 'target_state=initializing|error' + echo -n 'Waiting for psmdb/some-name to reach initializing|error state' Waiting for psmdb/some-name to reach initializing|error state+ local timeout=0 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' [...] ++ return 0 + [[ initializing =~ initializing|error ]] + echo + log 'psmdb/some-name is initializing|error: OK' + set +o xtrace [2025-12-09T22:10:29+0000] psmdb/some-name is initializing|error: OK + desc 'Verifying backup-minio-logical and update cluster back to ready state' + set +o xtrace ----------------------------------------------------------------------------------- Verifying backup-minio-logical and update cluster back to ready state ----------------------------------------------------------------------------------- + wait_backup backup-minio-logical ready + local backup_name=backup-minio-logical + local target_state=ready + set +o xtrace waiting for backup-minio-logical to reach ready state.........OK
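wait_backup compresses its polling to the dots above, so only its effect is visible in the log. A plausible sketch, assuming the backup resource reports its phase in .status.state; that field name is an assumption, not something this trace shows:

    wait_backup() {
        local name=$1 target=${2:-ready} state
        echo -n "waiting for $name to reach $target state"
        # .status.state is assumed; the trace shows only the dots and the OK
        until state=$(kubectl get perconaservermongodbbackup "$name" -o 'jsonpath={.status.state}') \
            && [[ $state == "$target" ]]; do
            sleep 1
            echo -n .
        done
        echo OK
    }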
+ verify_sts_not_ready + local sts_name=some-name-rs0 + is_sts_ready some-name-rs0 + local sts_name=some-name-rs0 + local replicas ready_replicas + kubectl_bin get sts some-name-rs0 ++ kubectl_bin get sts some-name-rs0 -o 'jsonpath={.spec.replicas}' [...] ++ return 0 + replicas=3 ++ kubectl_bin get sts some-name-rs0 -o 'jsonpath={.status.readyReplicas}' [...] ++ return 0 + ready_replicas=2 + [[ 3 -eq 2 ]] + wait_for_cluster_state some-name 'initializing|error' + local cluster_name=some-name + local 'target_state=initializing|error' + echo -n 'Waiting for psmdb/some-name to reach initializing|error state' Waiting for psmdb/some-name to reach initializing|error state+ local timeout=0 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' [...] ++ return 0 + [[ initializing =~ initializing|error ]] + echo + log 'psmdb/some-name is initializing|error: OK' + set +o xtrace [2025-12-09T22:10:48+0000] psmdb/some-name is initializing|error: OK + rollback_invalid_image + local cluster_name=some-name + local timeout= + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/demand-backup-if-unhealthy/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/demand-backup-if-unhealthy/conf/some-name.yml + kubectl_bin apply -f - [...] + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/demand-backup-if-unhealthy/conf/some-name.yml + yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod8.0"' + yq eval '(.spec | select(has("pmm"))).pmm.image = "percona/pmm-client:2.44.1-1"' + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-2114-3092c1dd"' + yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' + yq eval '.spec.upgradeOptions.apply="Never"' + kubectl apply -f - [...] + cat /tmp/tmp.ZgT7g3srB9 perconaservermongodb.psmdb.percona.com/some-name configured + cat /tmp/tmp.BqVAZJR9Zy + rm /tmp/tmp.ZgT7g3srB9 /tmp/tmp.BqVAZJR9Zy + return 0
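rollback_invalid_image is simply a re-apply of the original manifest: cat_config pins every image the spec references on the way through, which restores a pullable mongod image. The pipeline, condensed from the trace:

    cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/demand-backup-if-unhealthy/conf/some-name.yml \
        | yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod8.0"' \
        | yq eval '(.spec | select(has("pmm"))).pmm.image = "percona/pmm-client:2.44.1-1"' \
        | yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-2114-3092c1dd"' \
        | yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' \
        | yq eval '.spec.upgradeOptions.apply="Never"' \
        | kubectl apply -f -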
+ echo -n 'Waiting new statefulset version...' Waiting new statefulset version...+ kubectl_bin get statefulset some-name-rs0 -o 'jsonpath={range .spec.template.spec.containers[*]}{.image}{"\n"}{end}' + grep -q invalid-image:invalid-tag [...] + return 0 + echo -n .
[... the one-second check repeats until invalid-image:invalid-tag disappears from the statefulset template ...]
.+ sleep 1 + kubectl_bin get statefulset some-name-rs0 -o 'jsonpath={range .spec.template.spec.containers[*]}{.image}{"\n"}{end}' + grep -q invalid-image:invalid-tag ++ mktemp + local LAST_OUT=/tmp/tmp.wAEah9QpXS ++ mktemp + local LAST_ERR=/tmp/tmp.r1iku5c91R + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get statefulset some-name-rs0 -o 'jsonpath={range .spec.template.spec.containers[*]}{.image}{"\n"}{end}' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.wAEah9QpXS + cat /tmp/tmp.r1iku5c91R + rm /tmp/tmp.wAEah9QpXS /tmp/tmp.r1iku5c91R + return 0 + echo + echo 'Deleting crashing pod some-name-rs0-2' Deleting crashing pod some-name-rs0-2 + kubectl_bin delete pod some-name-rs0-2 ++ mktemp + local LAST_OUT=/tmp/tmp.JK03O7pkoy ++ mktemp + local LAST_ERR=/tmp/tmp.kxTIUVDmce + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete pod some-name-rs0-2 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.JK03O7pkoy pod "some-name-rs0-2" deleted from demand-backup-if-unhealthy-227 namespace + cat /tmp/tmp.kxTIUVDmce + rm /tmp/tmp.JK03O7pkoy /tmp/tmp.kxTIUVDmce + return 0 + wait_for_cluster_state some-name ready + local cluster_name=some-name + local target_state=ready + echo -n 'Waiting for psmdb/some-name to reach ready state' Waiting for psmdb/some-name to reach ready state+ local timeout=0 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.k8Hd9sUkvX +++ mktemp ++ local LAST_ERR=/tmp/tmp.hw4HCcAK6Y ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.k8Hd9sUkvX ++ cat /tmp/tmp.hw4HCcAK6Y ++ rm /tmp/tmp.k8Hd9sUkvX /tmp/tmp.hw4HCcAK6Y ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=1 + echo -n . .+ [[ 1 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RH9Y6CQuXQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.UWgC1wppZ3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.RH9Y6CQuXQ ++ cat /tmp/tmp.UWgC1wppZ3 ++ rm /tmp/tmp.RH9Y6CQuXQ /tmp/tmp.UWgC1wppZ3 ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=2 + echo -n . .+ [[ 2 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bA5cvHZww9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.z6exYx42uQ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bA5cvHZww9 ++ cat /tmp/tmp.z6exYx42uQ ++ rm /tmp/tmp.bA5cvHZww9 /tmp/tmp.z6exYx42uQ ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=3 + echo -n . 
[... the same one-second poll repeats for timeout=3 through timeout=8, with psmdb/some-name still "initializing" ...]
.+ [[ 9 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.G3s2dPRTqP +++ mktemp ++ local LAST_ERR=/tmp/tmp.RkpecJ6yZI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.G3s2dPRTqP ++ cat /tmp/tmp.RkpecJ6yZI ++ rm /tmp/tmp.G3s2dPRTqP /tmp/tmp.RkpecJ6yZI ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=10 + echo -n . .+ [[ 10 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6dZnH2S3u0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.jOPLYddJEX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.6dZnH2S3u0 ++ cat /tmp/tmp.jOPLYddJEX ++ rm /tmp/tmp.6dZnH2S3u0 /tmp/tmp.jOPLYddJEX ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=11 + echo -n . .+ [[ 11 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wS2FNhGMRu +++ mktemp ++ local LAST_ERR=/tmp/tmp.JyEnOwoAEi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.wS2FNhGMRu ++ cat /tmp/tmp.JyEnOwoAEi ++ rm /tmp/tmp.wS2FNhGMRu /tmp/tmp.JyEnOwoAEi ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=12 + echo -n . .+ [[ 12 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ViQvgnybsV +++ mktemp ++ local LAST_ERR=/tmp/tmp.mWfg6tPRp4 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ViQvgnybsV ++ cat /tmp/tmp.mWfg6tPRp4 ++ rm /tmp/tmp.ViQvgnybsV /tmp/tmp.mWfg6tPRp4 ++ return 0 + [[ ready =~ ready ]] + echo + log 'psmdb/some-name is ready: OK' + set +o xtrace [2025-12-09T22:11:48+0000] psmdb/some-name is ready: OK + desc 'Delete data and run restore of backup-minio-logical' + set +o xtrace ----------------------------------------------------------------------------------- Delete data and run restore of backup-minio-logical ----------------------------------------------------------------------------------- + delete_data 'This is some data for backup if unhealthy' + local 'data=This is some data for backup if unhealthy' + run_mongo 'use myApp\n db.test.deleteOne({ x: "This is some data for backup if unhealthy" })' myApp:myPass@some-name-rs0.demand-backup-if-unhealthy-227 + local 'command=use myApp\n db.test.deleteOne({ x: "This is some data for backup if unhealthy" })' + local uri=myApp:myPass@some-name-rs0.demand-backup-if-unhealthy-227 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ibflh4gjK9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.mjP9iVU0fc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e 
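For readers unfamiliar with the test helpers: the readiness wait above is a plain once-per-second poll of the custom resource's status, capped at 1500 iterations. A minimal sketch in bash, reconstructed from the trace (the helper name and the "-gt 1500" cap come from the log; the exact function body is an assumption):

    # Sketch reconstructed from the xtrace above -- not the suite's exact source.
    wait_for_cluster_state() {
        local cluster_name=$1
        local target_state=$2
        local timeout=0
        echo -n "Waiting for psmdb/${cluster_name} to reach ${target_state} state"
        # Poll .status.state once per second until it matches the target state.
        until [[ $(kubectl get psmdb "${cluster_name}" -o 'jsonpath={.status.state}') =~ ${target_state} ]]; do
            sleep 1
            timeout=$((timeout + 1))
            echo -n .
            # Give up after 1500 seconds (the "[[ N -gt 1500 ]]" checks in the trace).
            if [[ ${timeout} -gt 1500 ]]; then
                echo "timeout waiting for psmdb/${cluster_name}" >&2
                return 1
            fi
        done
        echo
    }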
+ desc 'Delete data and run restore of backup-minio-logical'
+ set +o xtrace
-----------------------------------------------------------------------------------
Delete data and run restore of backup-minio-logical
-----------------------------------------------------------------------------------
+ delete_data 'This is some data for backup if unhealthy'
+ local 'data=This is some data for backup if unhealthy'
+ run_mongo 'use myApp\n db.test.deleteOne({ x: "This is some data for backup if unhealthy" })' myApp:myPass@some-name-rs0.demand-backup-if-unhealthy-227
+ local 'command=use myApp\n db.test.deleteOne({ x: "This is some data for backup if unhealthy" })'
+ local uri=myApp:myPass@some-name-rs0.demand-backup-if-unhealthy-227
+ local driver=mongodb+srv
+ local suffix=.svc.cluster.local
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.ibflh4gjK9
+++ mktemp
++ local LAST_ERR=/tmp/tmp.mjP9iVU0fc
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.ibflh4gjK9
++ cat /tmp/tmp.mjP9iVU0fc
++ rm /tmp/tmp.ibflh4gjK9 /tmp/tmp.mjP9iVU0fc
++ return 0
+ local client_container=psmdb-client-696897d69b-96npl
+ local mongo_flag=
+ [[ myApp:myPass@some-name-rs0.demand-backup-if-unhealthy-227 == *cfg* ]]
+ replica_set=rs0
+ kubectl_bin exec psmdb-client-696897d69b-96npl -- bash -c 'printf '\''use myApp\n db.test.deleteOne({ x: "This is some data for backup if unhealthy" })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-if-unhealthy-227.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
++ mktemp
+ local LAST_OUT=/tmp/tmp.mgnNtiJaUN
++ mktemp
+ local LAST_ERR=/tmp/tmp.nj5UeZKH3X
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl exec psmdb-client-696897d69b-96npl -- bash -c 'printf '\''use myApp\n db.test.deleteOne({ x: "This is some data for backup if unhealthy" })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-if-unhealthy-227.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.mgnNtiJaUN
Percona Server for MongoDB shell version v4.4.29-28
connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-if-unhealthy-227.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-if-unhealthy-227.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-if-unhealthy-227.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false
Implicit session: session { "id" : UUID("fc73b582-f3c0-45f7-8139-d195d98026e6") }
Percona Server for MongoDB server version: v8.0.16-5
WARNING: shell and server versions do not match
switched to db myApp
{ "acknowledged" : true, "deletedCount" : 1 }
bye
+ cat /tmp/tmp.nj5UeZKH3X
+ rm /tmp/tmp.mgnNtiJaUN /tmp/tmp.nj5UeZKH3X
+ return 0
+ run_restore backup-minio-logical
+ local backup_name=backup-minio-logical
+ cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/demand-backup-if-unhealthy/conf/restore.yml
+ /usr/sbin/sed -e 's/name:/name: restore-backup-minio-logical/'
+ /usr/sbin/sed -e 's/backupName:/backupName: backup-minio-logical/'
+ kubectl_bin apply -f -
++ mktemp
+ local LAST_OUT=/tmp/tmp.LBkaKYYqfu
++ mktemp
+ local LAST_ERR=/tmp/tmp.jjeAc2ieE5
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.LBkaKYYqfu
perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-logical created
+ cat /tmp/tmp.jjeAc2ieE5
+ rm /tmp/tmp.LBkaKYYqfu /tmp/tmp.jjeAc2ieE5
+ return 0
+ wait_restore backup-minio-logical some-name
+ local backup_name=backup-minio-logical
+ local cluster_name=some-name
+ local target_state=ready
+ local wait_cluster_consistency=1
+ local wait_time=1780
+ local ok_if_ready=0
+ set +o xtrace
Waiting for the psmdb-restore/restore-backup-minio-logical object to be created.OK
Waiting psmdb-restore/restore-backup-minio-logical to reach state "ready" .OK after 0 minutes
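The restore object above is built by streaming a template through sed before applying it. A minimal sketch of what run_restore appears to do, assuming conf/restore.yml is a PerconaServerMongoDBRestore manifest with blank name: and backupName: fields (the sed expressions and file path are copied from the trace; the function skeleton and the test_dir variable are assumptions):

    # Sketch of the restore submission seen in the trace, not the suite's source.
    run_restore() {
        local backup_name=$1
        # Fill in the object name and the backup it restores from, then apply.
        cat "${test_dir}/conf/restore.yml" \
            | sed -e "s/name:/name: restore-${backup_name}/" \
            | sed -e "s/backupName:/backupName: ${backup_name}/" \
            | kubectl apply -f -
    }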
+ [[ 1 -eq 1 ]]
+ wait_cluster_consistency some-name
+ local cluster_name=some-name
+ local wait_time=32
+ retry=0
+ sleep 7
+ echo -n 'waiting for cluster readiness'
waiting for cluster readiness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.3bfVduiofG
+++ mktemp
++ local LAST_ERR=/tmp/tmp.rGmLxfv0Bh
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.3bfVduiofG
++ cat /tmp/tmp.rGmLxfv0Bh
++ rm /tmp/tmp.3bfVduiofG /tmp/tmp.rGmLxfv0Bh
++ return 0
+ [[ ready == \r\e\a\d\y ]]
+ echo .OK
.OK
+ wait_for_cluster_state some-name ready
+ local cluster_name=some-name
+ local target_state=ready
+ echo -n 'Waiting for psmdb/some-name to reach ready state'
Waiting for psmdb/some-name to reach ready state+ local timeout=0
++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.UdjCZvrCuE
+++ mktemp
++ local LAST_ERR=/tmp/tmp.aesG8edPXA
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.UdjCZvrCuE
++ cat /tmp/tmp.aesG8edPXA
++ rm /tmp/tmp.UdjCZvrCuE /tmp/tmp.aesG8edPXA
++ return 0
+ [[ ready =~ ready ]]
+ echo
+ log 'psmdb/some-name is ready: OK'
+ set +o xtrace
[2025-12-09T22:12:17+0000] psmdb/some-name is ready: OK
+ desc 'Verify data is back to the cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
Verify data is back to the cluster
-----------------------------------------------------------------------------------
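Each data check below goes through a run_mongo helper that resolves the long-lived psmdb-client pod and pipes the shell commands into mongo inside it. A rough bash reconstruction from the trace (flag handling, cfg-replset branching, and error checking omitted; the function body is an inference, not the suite's source):

    # Rough reconstruction of run_mongo from the trace above.
    run_mongo() {
        local command=$1   # mongo shell input, e.g. 'use myApp\n db.test.find()'
        local uri=$2       # user:pass@host portion, without scheme or DNS suffix
        local driver=${3:-mongodb+srv}
        local suffix=.svc.cluster.local
        # Find the client pod the suite deploys for running mongo commands.
        local client_container
        client_container=$(kubectl get pods --selector=name=psmdb-client \
            -o 'jsonpath={.items[].metadata.name}')
        # Pipe the commands into the mongo shell running inside the client pod.
        kubectl exec "${client_container}" -- bash -c \
            "printf '${command}\n' | mongo ${driver}://${uri}${suffix}/admin?ssl=false\&replicaSet=rs0"
    }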
+ simple_data_check some-name 3 0 -rs0
+ local cluster_name=some-name
+ let last_pod=3-1
+ local isSharded=0
+ local cluster_pfx=-rs0
+ '[' 0 -eq 1 ']'
++ seq 0 2
+ for i in $(seq 0 $last_pod)
+ compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-if-unhealthy-227
+ local command=find
+ local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-if-unhealthy-227
+ local postfix=
+ local suffix=
+ local database=myApp
+ local collection=test
+ local sort=
+ local tls=false
+ local 'full_command=db.test.find()'
+ [[ -n '' ]]
+ log 'running db.test.find() in myApp'
+ set +o xtrace
[2025-12-09T22:12:17+0000] running db.test.find() in myApp
+ [[ false == \t\r\u\e ]]
+ mongo_command=run_mongo
+ run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-if-unhealthy-227 mongodb ''
+ local 'command=use myApp\n db.test.find()'
+ local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-if-unhealthy-227
+ local driver=mongodb
+ local suffix=.svc.cluster.local
+ egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service'
+ /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
egrep: warning: egrep is obsolescent; using grep -E
++ local LAST_OUT=/tmp/tmp.vm8dATz3pk
+++ mktemp
++ local LAST_ERR=/tmp/tmp.cpKEFweIET
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.vm8dATz3pk
++ cat /tmp/tmp.cpKEFweIET
++ rm /tmp/tmp.vm8dATz3pk /tmp/tmp.cpKEFweIET
++ return 0
+ local client_container=psmdb-client-696897d69b-96npl
+ local mongo_flag=
+ [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-if-unhealthy-227 == *cfg* ]]
+ replica_set=rs0
+ kubectl_bin exec psmdb-client-696897d69b-96npl -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-if-unhealthy-227.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
++ mktemp
+ local LAST_OUT=/tmp/tmp.Vhfuh2sPfu
++ mktemp
+ local LAST_ERR=/tmp/tmp.uzAXuMxRpM
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl exec psmdb-client-696897d69b-96npl -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-if-unhealthy-227.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.Vhfuh2sPfu
+ cat /tmp/tmp.uzAXuMxRpM
+ rm /tmp/tmp.Vhfuh2sPfu /tmp/tmp.uzAXuMxRpM
+ return 0
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/demand-backup-if-unhealthy/compare/find.json /tmp/tmp.f9KmrCeeh7/find
+ for i in $(seq 0 $last_pod)
+ compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-if-unhealthy-227
[... same locals, egrep/sed filters, and psmdb-client pod lookup as for rs0-0 ...]
[2025-12-09T22:12:19+0000] running db.test.find() in myApp
+ kubectl_bin exec psmdb-client-696897d69b-96npl -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-if-unhealthy-227.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
+ return 0
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/demand-backup-if-unhealthy/compare/find.json /tmp/tmp.f9KmrCeeh7/find
+ for i in $(seq 0 $last_pod)
+ compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-if-unhealthy-227
[... same locals, egrep/sed filters, and psmdb-client pod lookup as for rs0-0 ...]
[2025-12-09T22:12:21+0000] running db.test.find() in myApp
+ kubectl_bin exec psmdb-client-696897d69b-96npl -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-if-unhealthy-227.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
+ return 0
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/e2e-tests/demand-backup-if-unhealthy/compare/find.json /tmp/tmp.f9KmrCeeh7/find
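The three per-pod checks above pass because the query output is normalized before comparison: noisy shell lines are filtered out and unstable tokens (ObjectIds, pod ordinals in service hostnames) are scrubbed, so the result can be diffed against a golden file. A condensed sketch of that pipeline (the filter pattern and sed script are copied from the trace; the wrapper function is an assumption, and grep -Ev is the modern spelling behind the log's "egrep is obsolescent" warnings):

    # Condensed sketch of the normalize-and-diff step used by compare_mongo_cmd.
    compare_find_output() {
        local uri=$1 golden=$2 out=/tmp/find
        run_mongo 'use myApp\n db.test.find()' "${uri}" mongodb \
            | grep -Ev 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Started a new thread for the timer service' \
            | sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' \
            > "${out}"
        # Any difference from the golden file fails the test.
        diff -u "${golden}" "${out}"
    }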
+ destroy demand-backup-if-unhealthy-227
+ local namespace=demand-backup-if-unhealthy-227
+ local ignore_logs=true
+ [[ 0 == 1 ]]
+ desc 'destroy cluster/operator and all other resources'
+ set +o xtrace
-----------------------------------------------------------------------------------
destroy cluster/operator and all other resources
-----------------------------------------------------------------------------------
+ '[' true == false ']'
+ delete_backups
+ desc 'Delete psmdb-backup'
+ set +o xtrace
-----------------------------------------------------------------------------------
Delete psmdb-backup
-----------------------------------------------------------------------------------
++ kubectl_bin get psmdb-backup --no-headers
++ wc -l
+++ mktemp
++ local LAST_OUT=/tmp/tmp.FFtxoQ3J1j
+++ mktemp
++ local LAST_ERR=/tmp/tmp.yWPiwlIMm4
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get psmdb-backup --no-headers
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.FFtxoQ3J1j
++ cat /tmp/tmp.yWPiwlIMm4
++ rm /tmp/tmp.FFtxoQ3J1j /tmp/tmp.yWPiwlIMm4
++ return 0
+ '[' 1 '!=' 0 ']'
+ kubectl_bin get psmdb-backup
++ mktemp
+ local LAST_OUT=/tmp/tmp.1jGJt4f2KP
++ mktemp
+ local LAST_ERR=/tmp/tmp.cen2sEVswv
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl get psmdb-backup
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.1jGJt4f2KP
NAME                   CLUSTER     STORAGE   DESTINATION                                  TYPE      SIZE      STATUS   COMPLETED   AGE
backup-minio-logical   some-name   minio     s3://operator-testing/2025-12-09T22:10:26Z   logical   52.50KB   ready    106s        119s
+ cat /tmp/tmp.cen2sEVswv
+ rm /tmp/tmp.1jGJt4f2KP /tmp/tmp.cen2sEVswv
+ return 0
+ kubectl_bin delete psmdb-backup --all
++ mktemp
+ local LAST_OUT=/tmp/tmp.YUvNpbB0ls
++ mktemp
+ local LAST_ERR=/tmp/tmp.CrMiJEOc1P
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete psmdb-backup --all
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.YUvNpbB0ls
perconaservermongodbbackup.psmdb.percona.com "backup-minio-logical" deleted from demand-backup-if-unhealthy-227 namespace
+ cat /tmp/tmp.CrMiJEOc1P
+ rm /tmp/tmp.YUvNpbB0ls /tmp/tmp.CrMiJEOc1P
+ return 0
+ delete_crd
+ desc 'get and delete old CRDs and RBAC'
+ set +o xtrace
-----------------------------------------------------------------------------------
get and delete old CRDs and RBAC
-----------------------------------------------------------------------------------
+ kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/deploy/crd.yaml --ignore-not-found --wait=false
++ mktemp
+ local LAST_OUT=/tmp/tmp.0zClFuom6V
++ mktemp
+ local LAST_ERR=/tmp/tmp.MpRpi0aO5w
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/deploy/crd.yaml --ignore-not-found --wait=false
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.0zClFuom6V
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted
+ cat /tmp/tmp.MpRpi0aO5w
+ rm /tmp/tmp.0zClFuom6V /tmp/tmp.MpRpi0aO5w
+ return 0
++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/deploy/crd.yaml
++ grep -v '\-\-\-'
grep: warning: stray \ before -
grep: warning: stray \ before -
+ for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-')
+ kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
error: the server doesn't have a resource type "perconaservermongodbbackups"
+ kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbbackups"
+ :
+ kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com
++ mktemp
+ local LAST_OUT=/tmp/tmp.Z2HaA4hEhQ
++ mktemp
+ local LAST_ERR=/tmp/tmp.bLAa8AHUqZ
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.Z2HaA4hEhQ
+ cat /tmp/tmp.bLAa8AHUqZ
+ rm /tmp/tmp.Z2HaA4hEhQ /tmp/tmp.bLAa8AHUqZ
+ return 0
+ for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-')
+ kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
error: the server doesn't have a resource type "perconaservermongodbrestores"
+ kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbrestores"
+ :
+ kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com
++ mktemp
+ local LAST_OUT=/tmp/tmp.0Q7NmAkFXD
++ mktemp
+ local LAST_ERR=/tmp/tmp.0m2wX1yhPj
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.0Q7NmAkFXD
+ cat /tmp/tmp.0m2wX1yhPj
+ rm /tmp/tmp.0Q7NmAkFXD /tmp/tmp.0m2wX1yhPj
+ return 0
+ for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-')
+ kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
No resources found
+ kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: resource(s) were provided, but no name was specified
+ :
+ kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com
++ mktemp
+ local LAST_OUT=/tmp/tmp.hFKMx7YRLv
++ mktemp
+ local LAST_ERR=/tmp/tmp.VhJ3IeGD6b
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.hFKMx7YRLv
customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com condition met
+ cat /tmp/tmp.VhJ3IeGD6b
+ rm /tmp/tmp.hFKMx7YRLv /tmp/tmp.VhJ3IeGD6b
+ return 0
+ local rbac_yaml=rbac.yaml
+ '[' -n psmdb-operator ']'
+ rbac_yaml=cw-rbac.yaml
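The per-CRD loop above has to clear finalizers before the CRDs can be deleted, since a backup object holding a finalizer would otherwise block removal indefinitely; errors are deliberately swallowed ("+ :" in the trace) because the resource type may already be gone. A sketch of the pattern (the xargs invocation is copied from the trace; the loop skeleton is an assumption, and grep -v -- '---' avoids the "stray \ before -" warnings the log shows for grep -v '\-\-\-'):

    # Sketch of the finalizer-clearing loop from the trace above.
    for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v -- '---'); do
        # Drop finalizers on every remaining object of this type, in any namespace.
        kubectl get "${crd_name}" --all-namespaces -o wide \
            | grep -v NAMESPACE \
            | xargs -L 1 sh -xc "kubectl patch ${crd_name} -n \$0 \$1 --type=merge -p '{\"metadata\":{\"finalizers\":[]}}'" \
            || :
        # Then block until the CRD itself is gone.
        kubectl wait --for=delete crd "${crd_name}"
    done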
+ kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/deploy/cw-rbac.yaml --ignore-not-found
++ mktemp
+ local LAST_OUT=/tmp/tmp.nDCoSjnFIn
++ mktemp
+ local LAST_ERR=/tmp/tmp.KCsd0LRt4H
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2114/deploy/cw-rbac.yaml --ignore-not-found
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.nDCoSjnFIn
clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted
clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted
+ cat /tmp/tmp.KCsd0LRt4H
+ rm /tmp/tmp.nDCoSjnFIn /tmp/tmp.KCsd0LRt4H
+ return 0
+ destroy_cert_manager
+ kubectl_bin delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml
++ mktemp
+ local LAST_OUT=/tmp/tmp.QtNBmRdZbF
++ mktemp
+ local LAST_ERR=/tmp/tmp.runFgOsalo
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 -a -n 1 ']'
+ cat /tmp/tmp.QtNBmRdZbF
+ cat /tmp/tmp.runFgOsalo
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": namespaces "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found
+ sleep 0
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 -a -n 1 ']'
+ cat /tmp/tmp.QtNBmRdZbF
+ cat /tmp/tmp.runFgOsalo
[... the same NotFound errors as above, repeated verbatim ...]
+ sleep 4
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 -a -n 1 ']'
+ cat /tmp/tmp.QtNBmRdZbF
+ cat /tmp/tmp.runFgOsalo
[... the same NotFound errors as above, repeated verbatim ...]
+ sleep 8
+ cat /tmp/tmp.QtNBmRdZbF
+ cat /tmp/tmp.runFgOsalo
[... the same NotFound errors as above, repeated verbatim ...]
+ '[' -n '' ']'
+ '[' -n psmdb-operator ']'
+ kubectl_bin delete --grace-period=0 --force=true namespace demand-backup-if-unhealthy-227
+ rm -rf /tmp/tmp.f9KmrCeeh7
+ kubectl_bin delete --grace-period=0 --force=true namespace psmdb-operator
++ mktemp
++ mktemp
+ desc 'test passed'
+ set +o xtrace
-----------------------------------------------------------------------------------
test passed
-----------------------------------------------------------------------------------
+ local LAST_OUT=/tmp/tmp.XcaQ2UBtPt
+ local LAST_OUT=/tmp/tmp.vPZBlQjYAI
++ mktemp
++ mktemp
+ local LAST_ERR=/tmp/tmp.cFyBx5Dat9
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ local LAST_ERR=/tmp/tmp.8312Acc3WF
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace psmdb-operator
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace demand-backup-if-unhealthy-227
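Note: the interleaved xtrace lines above come from the two namespace deletions running concurrently through the same retry wrapper. `--grace-period=0 --force=true` removes objects immediately instead of waiting for graceful pod shutdown, which is reasonable for throwaway CI namespaces, though lingering finalizers can still leave a namespace stuck in Terminating. A sketch of that cleanup pattern, assuming only the two namespace names taken from this run:

    # Force-delete the disposable test namespaces in parallel.
    # --force with --grace-period=0 skips graceful termination; use it only
    # where the workloads are safe to kill outright, e.g. a CI cluster.
    for ns in demand-backup-if-unhealthy-227 psmdb-operator; do
        kubectl delete namespace "$ns" --grace-period=0 --force=true --ignore-not-found &
    done
    wait  # block until both background deletions have returned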