Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/logs/tls-issue-cert-manager.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + main + create_infra tls-issue-cert-manager-30987 + local ns=tls-issue-cert-manager-30987 + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.ThwWz6FlDf ++ mktemp + local LAST_ERR=/tmp/tmp.vzbZhNhEwU + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ThwWz6FlDf customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.vzbZhNhEwU + rm /tmp/tmp.ThwWz6FlDf /tmp/tmp.vzbZhNhEwU + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/deploy/crd.yaml ++ grep -v '\-\-\-' + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.mNydPPmV1e ++ mktemp + local LAST_ERR=/tmp/tmp.Or6ljTZf7z + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.mNydPPmV1e + cat /tmp/tmp.Or6ljTZf7z + rm /tmp/tmp.mNydPPmV1e /tmp/tmp.Or6ljTZf7z + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource 
type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.tlzpQKN40P ++ mktemp + local LAST_ERR=/tmp/tmp.pDRb5iP8Jr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.tlzpQKN40P + cat /tmp/tmp.pDRb5iP8Jr + rm /tmp/tmp.tlzpQKN40P /tmp/tmp.pDRb5iP8Jr + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.Yv6L6c4SkR ++ mktemp + local LAST_ERR=/tmp/tmp.kYwXwCo0yV + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Yv6L6c4SkR + cat /tmp/tmp.kYwXwCo0yV + rm /tmp/tmp.Yv6L6c4SkR /tmp/tmp.kYwXwCo0yV + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.GIY0TJXtTh ++ mktemp + local LAST_ERR=/tmp/tmp.h3szYlxPch + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.GIY0TJXtTh clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.h3szYlxPch + rm /tmp/tmp.GIY0TJXtTh /tmp/tmp.h3szYlxPch + return 0 + check_crd_for_deletion PR-1567-b27e0b5e + local git_tag=PR-1567-b27e0b5e ++ curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/PR-1567-b27e0b5e/deploy/crd.yaml ++ yq eval .metadata.name ++ /usr/bin/sed s/---//g ++ /usr/bin/sed ':a;N;$!ba;s/\n/ /g' + for crd_name in '$(curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml | yq eval '\''.metadata.name'\'' | $sed '\''s/---//g'\'' | $sed '\'':a;N;$!ba;s/\n/ /g'\'')' ++ kubectl_bin get crd/null -o 'jsonpath={.status.conditions[-1].type}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HrPnCM3KqA +++ mktemp ++ local LAST_ERR=/tmp/tmp.oGPm4xjZFr ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.HrPnCM3KqA ++ cat /tmp/tmp.oGPm4xjZFr Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 0 ++ for i in '$(seq 0 2)' ++ 
set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.HrPnCM3KqA ++ cat /tmp/tmp.oGPm4xjZFr Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 4 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.HrPnCM3KqA ++ cat /tmp/tmp.oGPm4xjZFr Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 8 ++ cat /tmp/tmp.HrPnCM3KqA ++ cat /tmp/tmp.oGPm4xjZFr Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ rm /tmp/tmp.HrPnCM3KqA /tmp/tmp.oGPm4xjZFr ++ return 1 + [[ '' == \T\e\r\m\i\n\a\t\i\n\g ]] + '[' -n psmdb-operator ']' + create_namespace psmdb-operator + local namespace=psmdb-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces psmdb-operator' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace psmdb-operator --ignore-not-found + kubectl_bin get ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.tH9wx37yjR + local LAST_OUT=/tmp/tmp.1BDUITgWZ7 ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.GTUmmqEPXh + local exit_status=0 + local timeout=4 + local 
LAST_ERR=/tmp/tmp.dYKA383qwX + local exit_status=0 + local timeout=4 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace psmdb-operator --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.tH9wx37yjR + cat /tmp/tmp.GTUmmqEPXh + rm /tmp/tmp.tH9wx37yjR /tmp/tmp.GTUmmqEPXh + return 0 namespace "cert-manager" deleted namespace "tls-issue-cert-manager-5340" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.1BDUITgWZ7 namespace "psmdb-operator" deleted + cat /tmp/tmp.dYKA383qwX + rm /tmp/tmp.1BDUITgWZ7 /tmp/tmp.dYKA383qwX + return 0 + kubectl_bin wait --for=delete namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.vx46QwUgXM ++ mktemp + local LAST_ERR=/tmp/tmp.YF3V0pmxmv + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.vx46QwUgXM + cat /tmp/tmp.YF3V0pmxmv + rm /tmp/tmp.vx46QwUgXM /tmp/tmp.YF3V0pmxmv + return 0 + desc 'create namespace psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.siM0Ylg6Qo ++ mktemp + local LAST_ERR=/tmp/tmp.zABvZ1weoQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.siM0Ylg6Qo namespace/psmdb-operator created + cat /tmp/tmp.zABvZ1weoQ + rm /tmp/tmp.siM0Ylg6Qo /tmp/tmp.zABvZ1weoQ + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.aDK16g8h6j +++ mktemp ++ local LAST_ERR=/tmp/tmp.e3oA3d337e ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.aDK16g8h6j ++ cat /tmp/tmp.e3oA3d337e ++ rm /tmp/tmp.aDK16g8h6j /tmp/tmp.e3oA3d337e ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1567-b27e0b5e-7-cluster3 --namespace=psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.9SpHhsIJG5 ++ mktemp + local LAST_ERR=/tmp/tmp.xlOG3UqYav + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1567-b27e0b5e-7-cluster3 --namespace=psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9SpHhsIJG5 Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1567-b27e0b5e-7-cluster3" modified. 
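# The repeating "mktemp ... seq 0 2 ... set +e ... set -e ... cat ... rm ... return"
# noise surrounding every kubectl call in this trace comes from the suite's retry
# wrapper. A minimal sketch of that wrapper, reconstructed from the trace alone;
# the exact retry condition (logged as "-a -n 1") and variable names are
# assumptions, not the verbatim helper from the test suite:
kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status i
    LAST_OUT=$(mktemp)                      # captures stdout of the real kubectl call
    LAST_ERR=$(mktemp)                      # captures stderr separately
    exit_status=0
    local timeout=4
    for i in $(seq 0 2); do                 # up to three attempts, as seen above
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ $exit_status != 0 ]; then      # the real helper also tests a retry flag ("-n 1" in the trace)
            cat "$LAST_OUT"
            cat "$LAST_ERR"
            sleep $((timeout * i))          # trace shows sleep 0, sleep 4, sleep 8 between attempts
        else
            break
        fi
    done
    cat "$LAST_OUT"                         # replay captured output into the log
    cat "$LAST_ERR"
    rm "$LAST_OUT" "$LAST_ERR"
    return $exit_status
}
# Used throughout as, e.g.: kubectl_bin wait --for=delete namespace psmdb-operator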
+ cat /tmp/tmp.xlOG3UqYav + rm /tmp/tmp.9SpHhsIJG5 /tmp/tmp.xlOG3UqYav + return 0 + deploy_operator + desc 'start PSMDB operator' + set +o xtrace ----------------------------------------------------------------------------------- start PSMDB operator ----------------------------------------------------------------------------------- + local cr_file + '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/crd.yaml ']' + cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/deploy/crd.yaml + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.p7573YjW0j ++ mktemp + local LAST_ERR=/tmp/tmp.dRNz3qALRf + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.p7573YjW0j customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied + cat /tmp/tmp.dRNz3qALRf + rm /tmp/tmp.p7573YjW0j /tmp/tmp.dRNz3qALRf + return 0 + '[' -n psmdb-operator ']' + apply_rbac cw-rbac + local operator_namespace=psmdb-operator + local rbac=cw-rbac + sed -e 's^namespace: .*^namespace: psmdb-operator^' + kubectl_bin apply -n psmdb-operator -f - + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/deploy/cw-rbac.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.XRMgN4hdHX ++ mktemp + local LAST_ERR=/tmp/tmp.wk1OUF48OR + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.XRMgN4hdHX clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created + cat /tmp/tmp.wk1OUF48OR + rm /tmp/tmp.XRMgN4hdHX /tmp/tmp.wk1OUF48OR + return 0 + yq eval ' (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-1567-b27e0b5e") | ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") | ((.. 
| select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/deploy/cw-operator.yaml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.bVRDOddFrf ++ mktemp + local LAST_ERR=/tmp/tmp.tihE67DNqA + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.bVRDOddFrf deployment.apps/percona-server-mongodb-operator created + cat /tmp/tmp.tihE67DNqA + rm /tmp/tmp.bVRDOddFrf /tmp/tmp.tihE67DNqA + return 0 + sleep 2 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.n0hC8cm4jM +++ mktemp ++ local LAST_ERR=/tmp/tmp.LU7Q1PxWm5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.n0hC8cm4jM ++ cat /tmp/tmp.LU7Q1PxWm5 ++ rm /tmp/tmp.n0hC8cm4jM /tmp/tmp.LU7Q1PxWm5 ++ return 0 + wait_pod percona-server-mongodb-operator-6cfcdf54dd-56bxw + local pod=percona-server-mongodb-operator-6cfcdf54dd-56bxw + set +o xtrace waiting for pod/percona-server-mongodb-operator-6cfcdf54dd-56bxw to be ready.OK + create_namespace tls-issue-cert-manager-30987 + local namespace=tls-issue-cert-manager-30987 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ sed s/NAMESPACE// ++ awk '-F ' '{print $2}' + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + awk '{print$1}' + '[' -n '' ']' + xargs kubectl delete ns + desc 'cleaned up old 
namespaces tls-issue-cert-manager-30987' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces tls-issue-cert-manager-30987 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace tls-issue-cert-manager-30987 --ignore-not-found + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.OtByFb6tWf + local LAST_OUT=/tmp/tmp.aA9wGit6SB ++ mktemp + local LAST_ERR=/tmp/tmp.nlnDyNwpTc + local exit_status=0 + local timeout=4 ++ mktemp ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + local LAST_ERR=/tmp/tmp.P40ZD5zpZC + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace tls-issue-cert-manager-30987 --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.aA9wGit6SB + cat /tmp/tmp.nlnDyNwpTc + rm /tmp/tmp.aA9wGit6SB /tmp/tmp.nlnDyNwpTc + return 0 error: resource(s) were provided, but no name was specified + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.OtByFb6tWf + cat /tmp/tmp.P40ZD5zpZC + rm /tmp/tmp.OtByFb6tWf /tmp/tmp.P40ZD5zpZC + return 0 + kubectl_bin wait --for=delete namespace tls-issue-cert-manager-30987 ++ mktemp + local LAST_OUT=/tmp/tmp.6pZqkgnAbf ++ mktemp + local LAST_ERR=/tmp/tmp.rrt1G16rea + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete namespace tls-issue-cert-manager-30987 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6pZqkgnAbf + cat /tmp/tmp.rrt1G16rea + rm /tmp/tmp.6pZqkgnAbf /tmp/tmp.rrt1G16rea + return 0 + desc 'create namespace tls-issue-cert-manager-30987' + set +o xtrace ----------------------------------------------------------------------------------- create namespace tls-issue-cert-manager-30987 ----------------------------------------------------------------------------------- + kubectl_bin create namespace tls-issue-cert-manager-30987 ++ mktemp + local LAST_OUT=/tmp/tmp.C78KJuCOMz ++ mktemp + local LAST_ERR=/tmp/tmp.haCP5ax6bA + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace tls-issue-cert-manager-30987 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.C78KJuCOMz namespace/tls-issue-cert-manager-30987 created + cat /tmp/tmp.haCP5ax6bA + rm /tmp/tmp.C78KJuCOMz /tmp/tmp.haCP5ax6bA + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.DPOPhLOVFU +++ mktemp ++ local LAST_ERR=/tmp/tmp.RxkGCgaqkg ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.DPOPhLOVFU ++ cat /tmp/tmp.RxkGCgaqkg ++ rm /tmp/tmp.DPOPhLOVFU /tmp/tmp.RxkGCgaqkg ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1567-b27e0b5e-7-cluster3 --namespace=tls-issue-cert-manager-30987 ++ mktemp + local LAST_OUT=/tmp/tmp.KYyTCKNNgH ++ mktemp + local LAST_ERR=/tmp/tmp.f6t2mrOCnK + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1567-b27e0b5e-7-cluster3 --namespace=tls-issue-cert-manager-30987 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.KYyTCKNNgH Context 
"gke_cloud-dev-112233_us-central1-a_jen-psmdb-1567-b27e0b5e-7-cluster3" modified. + cat /tmp/tmp.f6t2mrOCnK + rm /tmp/tmp.KYyTCKNNgH /tmp/tmp.f6t2mrOCnK + return 0 + deploy_cert_manager + desc 'deploy cert manager' + set +o xtrace ----------------------------------------------------------------------------------- deploy cert manager ----------------------------------------------------------------------------------- + kubectl_bin create namespace cert-manager ++ mktemp + local LAST_OUT=/tmp/tmp.Fftp4s2ZTo ++ mktemp + local LAST_ERR=/tmp/tmp.p72l15d7H4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace cert-manager + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Fftp4s2ZTo namespace/cert-manager created + cat /tmp/tmp.p72l15d7H4 + rm /tmp/tmp.Fftp4s2ZTo /tmp/tmp.p72l15d7H4 + return 0 + kubectl_bin label namespace cert-manager certmanager.k8s.io/disable-validation=true ++ mktemp + local LAST_OUT=/tmp/tmp.iBR4wGatRX ++ mktemp + local LAST_ERR=/tmp/tmp.QnCmMSYyhJ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl label namespace cert-manager certmanager.k8s.io/disable-validation=true + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.iBR4wGatRX namespace/cert-manager labeled + cat /tmp/tmp.QnCmMSYyhJ + rm /tmp/tmp.iBR4wGatRX /tmp/tmp.QnCmMSYyhJ + return 0 + kubectl_bin apply -f https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml --validate=false ++ mktemp + local LAST_OUT=/tmp/tmp.RkJaFHnsCu ++ mktemp + local LAST_ERR=/tmp/tmp.9AjvVZSPGA + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml --validate=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.RkJaFHnsCu namespace/cert-manager configured customresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io unchanged serviceaccount/cert-manager-cainjector created serviceaccount/cert-manager created serviceaccount/cert-manager-webhook created clusterrole.rbac.authorization.k8s.io/cert-manager-cainjector unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-cluster-view unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-view unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-edit unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged 
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews configured role.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged role.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged role.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created rolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged rolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection configured rolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created service/cert-manager created service/cert-manager-webhook created deployment.apps/cert-manager-cainjector created deployment.apps/cert-manager created deployment.apps/cert-manager-webhook created mutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured validatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured + cat /tmp/tmp.9AjvVZSPGA Warning: resource namespaces/cert-manager is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
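# Note on the warning just above: deploy_cert_manager creates the cert-manager
# namespace imperatively ("kubectl create namespace cert-manager") and then
# applies the upstream v1.14.5 release manifest, which also contains that
# Namespace object. Because the imperative create does not record the
# kubectl.kubernetes.io/last-applied-configuration annotation, the apply reports
# "namespace/cert-manager configured" plus this warning and patches the
# annotation in automatically; the deployment itself is unaffected. A hedged
# one-line sketch of how the warning could be avoided (not what this suite
# actually does), using the flag the warning text itself points to:
kubectl create namespace cert-manager --save-config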
+ rm /tmp/tmp.RkJaFHnsCu /tmp/tmp.9AjvVZSPGA + return 0 + kubectl_bin -n cert-manager wait pod -l app.kubernetes.io/instance=cert-manager --for=condition=ready ++ mktemp + local LAST_OUT=/tmp/tmp.raGoJiYTul ++ mktemp + local LAST_ERR=/tmp/tmp.m8ipGPJuEO + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl -n cert-manager wait pod -l app.kubernetes.io/instance=cert-manager --for=condition=ready + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.raGoJiYTul pod/cert-manager-5658d944df-5pg8s condition met pod/cert-manager-cainjector-cb99ff845-vglrp condition met pod/cert-manager-webhook-7fd74b8dc7-4vcxs condition met + cat /tmp/tmp.m8ipGPJuEO + rm /tmp/tmp.raGoJiYTul /tmp/tmp.m8ipGPJuEO + return 0 + sleep 120 + desc 'create secrets and start client' + set +o xtrace ----------------------------------------------------------------------------------- create secrets and start client ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.qz66d52AAO ++ mktemp + local LAST_ERR=/tmp/tmp.FCmilTIuDx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.qz66d52AAO secret/some-users created + cat /tmp/tmp.FCmilTIuDx + rm /tmp/tmp.qz66d52AAO /tmp/tmp.FCmilTIuDx + return 0 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/conf/client_with_tls.yml ++ mktemp + local LAST_OUT=/tmp/tmp.XZ05hetvKr ++ mktemp + local LAST_ERR=/tmp/tmp.Es3W3SfMuw + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/conf/client_with_tls.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.XZ05hetvKr deployment.apps/psmdb-client created + cat /tmp/tmp.Es3W3SfMuw + rm /tmp/tmp.XZ05hetvKr /tmp/tmp.Es3W3SfMuw + return 0 + desc 'create custom cert-manager issuers and certificates' + set +o xtrace ----------------------------------------------------------------------------------- create custom cert-manager issuers and certificates ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/some-name-psmdb-ca-issuer.yml ++ mktemp + local LAST_OUT=/tmp/tmp.4bFkXPPpnX ++ mktemp + local LAST_ERR=/tmp/tmp.fYOMIVqHv3 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/some-name-psmdb-ca-issuer.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.4bFkXPPpnX issuer.cert-manager.io/some-name-psmdb-ca-issuer created + cat /tmp/tmp.fYOMIVqHv3 + rm /tmp/tmp.4bFkXPPpnX /tmp/tmp.fYOMIVqHv3 + return 0 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/some-name-psmdb-issuer.yml ++ mktemp + local LAST_OUT=/tmp/tmp.N2s7sjrfzT ++ mktemp + local LAST_ERR=/tmp/tmp.N4NKVOBiG2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f 
/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/some-name-psmdb-issuer.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.N2s7sjrfzT issuer.cert-manager.io/some-name-psmdb-issuer created + cat /tmp/tmp.N4NKVOBiG2 + rm /tmp/tmp.N2s7sjrfzT /tmp/tmp.N4NKVOBiG2 + return 0 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/some-name-ca-cert.yml ++ mktemp + local LAST_OUT=/tmp/tmp.e0r7Qlxc25 ++ mktemp + local LAST_ERR=/tmp/tmp.FyzfjiEMna + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/some-name-ca-cert.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.e0r7Qlxc25 certificate.cert-manager.io/some-name-ca-cert created + cat /tmp/tmp.FyzfjiEMna + rm /tmp/tmp.e0r7Qlxc25 /tmp/tmp.FyzfjiEMna + return 0 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/some-name-ssl-internal.yml ++ mktemp + local LAST_OUT=/tmp/tmp.atKMaaRHLM ++ mktemp + local LAST_ERR=/tmp/tmp.fPdo6xTKeC + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/some-name-ssl-internal.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.atKMaaRHLM certificate.cert-manager.io/some-name-ssl-internal created + cat /tmp/tmp.fPdo6xTKeC + rm /tmp/tmp.atKMaaRHLM /tmp/tmp.fPdo6xTKeC + return 0 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/some-name-ssl.yml ++ mktemp + local LAST_OUT=/tmp/tmp.7chQfeiAoY ++ mktemp + local LAST_ERR=/tmp/tmp.eVtS9OlHyk + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/some-name-ssl.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.7chQfeiAoY certificate.cert-manager.io/some-name-ssl created + cat /tmp/tmp.eVtS9OlHyk + rm /tmp/tmp.7chQfeiAoY /tmp/tmp.eVtS9OlHyk + return 0 + deploy_cmctl + local service_account=cmctl + yq '(select(.rules).rules[] | select(contains({"apiGroups": ["cert-manager.io"]}))).resources += "certificates/status"' + kubectl_bin apply -f - + /usr/bin/sed -e s/percona-server-mongodb-operator/cmctl/g /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/deploy/rbac.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.z5sHGVDHBz ++ mktemp + local LAST_ERR=/tmp/tmp.h0anw5aQp2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.z5sHGVDHBz role.rbac.authorization.k8s.io/cmctl created serviceaccount/cmctl created rolebinding.rbac.authorization.k8s.io/service-account-cmctl created + cat /tmp/tmp.h0anw5aQp2 + rm /tmp/tmp.z5sHGVDHBz /tmp/tmp.h0anw5aQp2 + return 0 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/conf/cmctl.yml ++ mktemp + local LAST_OUT=/tmp/tmp.PcjkeN05BK ++ mktemp + local LAST_ERR=/tmp/tmp.Xw5AmlBa5X + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/conf/cmctl.yml + 
exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.PcjkeN05BK deployment.apps/cmctl created + cat /tmp/tmp.Xw5AmlBa5X + rm /tmp/tmp.PcjkeN05BK /tmp/tmp.Xw5AmlBa5X + return 0 + sleep 60 + cluster=some-name + desc 'create first PSMDB cluster some-name' + set +o xtrace ----------------------------------------------------------------------------------- create first PSMDB cluster some-name ----------------------------------------------------------------------------------- + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/some-name.yml + yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod7.0"' ++ mktemp + yq eval '(.spec | select(has("pmm"))).pmm.image = "perconalab/pmm-client:dev-latest"' + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-1567-b27e0b5e"' + yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' + yq eval '.spec.upgradeOptions.apply="Never"' + local LAST_OUT=/tmp/tmp.RMs5UjWMnO ++ mktemp + local LAST_ERR=/tmp/tmp.GqOZUmREYK + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.RMs5UjWMnO perconaservermongodb.psmdb.percona.com/some-name created + cat /tmp/tmp.GqOZUmREYK + rm /tmp/tmp.RMs5UjWMnO /tmp/tmp.GqOZUmREYK + return 0 + desc 'check if all Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all Pods started ----------------------------------------------------------------------------------- + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.................OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready..............OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.46L2ET1qpk +++ mktemp ++ local LAST_ERR=/tmp/tmp.Vcb8S4vFb3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.46L2ET1qpk ++ cat /tmp/tmp.Vcb8S4vFb3 ++ rm /tmp/tmp.46L2ET1qpk /tmp/tmp.Vcb8S4vFb3 ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready................OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5EI9HwjWe9 +++ mktemp ++ local 
LAST_ERR=/tmp/tmp.BTcHULjxrC ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.5EI9HwjWe9 ++ cat /tmp/tmp.BTcHULjxrC ++ rm /tmp/tmp.5EI9HwjWe9 /tmp/tmp.BTcHULjxrC ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness...................... + wait_for_running some-name-cfg 3 false + local name=some-name-cfg + let last_pod=2 + local check_cluster_readyness=false + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=cfg + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-cfg-0 + local pod=some-name-cfg-0 + set +o xtrace waiting for pod/some-name-cfg-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-cfg-1 + local pod=some-name-cfg-1 + set +o xtrace waiting for pod/some-name-cfg-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LzEpsZgpn9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.aNjdlasZt4 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LzEpsZgpn9 ++ cat /tmp/tmp.aNjdlasZt4 ++ rm /tmp/tmp.LzEpsZgpn9 /tmp/tmp.aNjdlasZt4 ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-cfg-2 + local pod=some-name-cfg-2 + set +o xtrace waiting for pod/some-name-cfg-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Uc9cyHwEzg +++ mktemp ++ local LAST_ERR=/tmp/tmp.gzbtspohi7 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Uc9cyHwEzg ++ cat /tmp/tmp.gzbtspohi7 ++ rm /tmp/tmp.Uc9cyHwEzg /tmp/tmp.gzbtspohi7 ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ false == \t\r\u\e ]] + wait_for_running some-name-mongos 3 + local name=some-name-mongos + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=mongos + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-mongos-0 + local pod=some-name-mongos-0 + set +o xtrace waiting for pod/some-name-mongos-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-mongos-1 + local pod=some-name-mongos-1 + set +o xtrace waiting for pod/some-name-mongos-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LB6OHi1Vbx +++ mktemp ++ local LAST_ERR=/tmp/tmp.FP0YyzFyOG ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LB6OHi1Vbx 
++ cat /tmp/tmp.FP0YyzFyOG ++ rm /tmp/tmp.LB6OHi1Vbx /tmp/tmp.FP0YyzFyOG ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-mongos-2 + local pod=some-name-mongos-2 + set +o xtrace waiting for pod/some-name-mongos-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TjhmvImTmW +++ mktemp ++ local LAST_ERR=/tmp/tmp.WmY7vVWwk1 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.TjhmvImTmW ++ cat /tmp/tmp.WmY7vVWwk1 ++ rm /tmp/tmp.TjhmvImTmW /tmp/tmp.WmY7vVWwk1 ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + desc 'compare custom certificates and issuers' + set +o xtrace ----------------------------------------------------------------------------------- compare custom certificates and issuers ----------------------------------------------------------------------------------- + compare_kubectl certificate/some-name-ssl -custom + local resource=certificate/some-name-ssl + local postfix=-custom + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-custom.yml + local new_result=/tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-custom-oc.yml ']' + kubectl_bin get -o yaml certificate/some-name-ssl ++ mktemp + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. 
| select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-30987", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - + local LAST_OUT=/tmp/tmp.dXjCa3tg2r ++ mktemp + local LAST_ERR=/tmp/tmp.FiHMf3Ecm1 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml certificate/some-name-ssl + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.dXjCa3tg2r + cat /tmp/tmp.FiHMf3Ecm1 + rm /tmp/tmp.dXjCa3tg2r /tmp/tmp.FiHMf3Ecm1 + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-custom.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-custom.yml /tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl.yml + compare_kubectl certificate/some-name-ssl-internal -custom + local resource=certificate/some-name-ssl-internal + local postfix=-custom + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-internal-custom.yml + local new_result=/tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl-internal.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-internal-custom-oc.yml ']' + kubectl_bin get -o yaml certificate/some-name-ssl-internal + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. 
| select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-30987", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.B1r7wdF0s4 ++ mktemp + local LAST_ERR=/tmp/tmp.Otv9DtMgFX + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml certificate/some-name-ssl-internal + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.B1r7wdF0s4 + cat /tmp/tmp.Otv9DtMgFX + rm /tmp/tmp.B1r7wdF0s4 /tmp/tmp.Otv9DtMgFX + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl-internal.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl-internal.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl-internal.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-internal-custom.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-internal-custom.yml /tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl-internal.yml + compare_kubectl certificate/some-name-ca-cert -custom + local resource=certificate/some-name-ca-cert + local postfix=-custom + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ca-cert-custom.yml + local new_result=/tmp/tmp.jTdR9s6bFl/certificate_some-name-ca-cert.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ca-cert-custom-oc.yml ']' + kubectl_bin get -o yaml certificate/some-name-ca-cert ++ mktemp + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. 
| select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-30987", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - + local LAST_OUT=/tmp/tmp.OWIyCdNcM5 ++ mktemp + local LAST_ERR=/tmp/tmp.o4cLYVLE7D + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml certificate/some-name-ca-cert + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.OWIyCdNcM5 + cat /tmp/tmp.o4cLYVLE7D + rm /tmp/tmp.OWIyCdNcM5 /tmp/tmp.o4cLYVLE7D + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.jTdR9s6bFl/certificate_some-name-ca-cert.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.jTdR9s6bFl/certificate_some-name-ca-cert.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.jTdR9s6bFl/certificate_some-name-ca-cert.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ca-cert-custom.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ca-cert-custom.yml /tmp/tmp.jTdR9s6bFl/certificate_some-name-ca-cert.yml + compare_kubectl issuer/some-name-psmdb-ca-issuer -custom + local resource=issuer/some-name-psmdb-ca-issuer + local postfix=-custom + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-ca-issuer-custom.yml + local new_result=/tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-ca-issuer.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-ca-issuer-custom-oc.yml ']' + kubectl_bin get -o yaml issuer/some-name-psmdb-ca-issuer + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. 
| select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-30987", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.Lpm65Nmq1T ++ mktemp + local LAST_ERR=/tmp/tmp.88ZTyeBTbb + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml issuer/some-name-psmdb-ca-issuer + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Lpm65Nmq1T + cat /tmp/tmp.88ZTyeBTbb + rm /tmp/tmp.Lpm65Nmq1T /tmp/tmp.88ZTyeBTbb + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-ca-issuer.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-ca-issuer.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-ca-issuer.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-ca-issuer-custom.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-ca-issuer-custom.yml /tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-ca-issuer.yml + compare_kubectl issuer/some-name-psmdb-issuer -custom + local resource=issuer/some-name-psmdb-issuer + local postfix=-custom + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-issuer-custom.yml + local new_result=/tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-issuer.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-issuer-custom-oc.yml ']' + kubectl_bin get -o yaml issuer/some-name-psmdb-issuer + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. 
| select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-30987", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.sdN07TYmdk ++ mktemp + local LAST_ERR=/tmp/tmp.bU4pFJUYnb + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml issuer/some-name-psmdb-issuer + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.sdN07TYmdk + cat /tmp/tmp.bU4pFJUYnb + rm /tmp/tmp.sdN07TYmdk /tmp/tmp.bU4pFJUYnb + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-issuer.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-issuer.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-issuer.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-issuer-custom.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-issuer-custom.yml /tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-issuer.yml + desc 'delete cluster' + set +o xtrace ----------------------------------------------------------------------------------- delete cluster ----------------------------------------------------------------------------------- + kubectl delete psmdb --all perconaservermongodb.psmdb.percona.com "some-name" deleted + kubectl delete pvc --all persistentvolumeclaim "mongod-data-some-name-cfg-0" deleted persistentvolumeclaim "mongod-data-some-name-cfg-1" deleted persistentvolumeclaim "mongod-data-some-name-cfg-2" deleted persistentvolumeclaim "mongod-data-some-name-rs0-0" deleted persistentvolumeclaim "mongod-data-some-name-rs0-1" deleted persistentvolumeclaim "mongod-data-some-name-rs0-2" deleted + desc 'delete custom cert-manager issuers and certificates' + set +o xtrace ----------------------------------------------------------------------------------- delete custom cert-manager issuers and certificates ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/some-name-psmdb-ca-issuer.yml ++ mktemp + local LAST_OUT=/tmp/tmp.Sb0GOIEVXW ++ mktemp + local LAST_ERR=/tmp/tmp.0G503KfskV + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/some-name-psmdb-ca-issuer.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Sb0GOIEVXW issuer.cert-manager.io "some-name-psmdb-ca-issuer" deleted + cat /tmp/tmp.0G503KfskV + rm /tmp/tmp.Sb0GOIEVXW /tmp/tmp.0G503KfskV + return 0 + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/some-name-psmdb-issuer.yml ++ mktemp + local LAST_OUT=/tmp/tmp.ZwK49ri0kS ++ mktemp + local LAST_ERR=/tmp/tmp.7UAMetYXZI + local exit_status=0 
+ local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/some-name-psmdb-issuer.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ZwK49ri0kS issuer.cert-manager.io "some-name-psmdb-issuer" deleted + cat /tmp/tmp.7UAMetYXZI + rm /tmp/tmp.ZwK49ri0kS /tmp/tmp.7UAMetYXZI + return 0 + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/some-name-ca-cert.yml ++ mktemp + local LAST_OUT=/tmp/tmp.pmiSss4nhd ++ mktemp + local LAST_ERR=/tmp/tmp.JSNNh2NRjH + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/some-name-ca-cert.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.pmiSss4nhd certificate.cert-manager.io "some-name-ca-cert" deleted + cat /tmp/tmp.JSNNh2NRjH + rm /tmp/tmp.pmiSss4nhd /tmp/tmp.JSNNh2NRjH + return 0 + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/some-name-ssl-internal.yml ++ mktemp + local LAST_OUT=/tmp/tmp.I2vKqBoAdr ++ mktemp + local LAST_ERR=/tmp/tmp.cnpAHLNiNm + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/some-name-ssl-internal.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.I2vKqBoAdr certificate.cert-manager.io "some-name-ssl-internal" deleted + cat /tmp/tmp.cnpAHLNiNm + rm /tmp/tmp.I2vKqBoAdr /tmp/tmp.cnpAHLNiNm + return 0 + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/some-name-ssl.yml ++ mktemp + local LAST_OUT=/tmp/tmp.4ZdEhUIz63 ++ mktemp + local LAST_ERR=/tmp/tmp.OJqFoQjA29 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/some-name-ssl.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.4ZdEhUIz63 certificate.cert-manager.io "some-name-ssl" deleted + cat /tmp/tmp.OJqFoQjA29 + rm /tmp/tmp.4ZdEhUIz63 /tmp/tmp.OJqFoQjA29 + return 0 + sleep 30 + desc 'delete ssl secrets, operator should recreate them' + set +o xtrace ----------------------------------------------------------------------------------- delete ssl secrets, operator should recreate them ----------------------------------------------------------------------------------- + kubectl_bin delete secret some-name-ssl-internal ++ mktemp + local LAST_OUT=/tmp/tmp.mybhxHjMbi ++ mktemp + local LAST_ERR=/tmp/tmp.hnOZlbiUmQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete secret some-name-ssl-internal + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.mybhxHjMbi secret "some-name-ssl-internal" deleted + cat /tmp/tmp.hnOZlbiUmQ + rm /tmp/tmp.mybhxHjMbi /tmp/tmp.hnOZlbiUmQ + return 0 + kubectl_bin delete secret some-name-ssl ++ mktemp + local LAST_OUT=/tmp/tmp.ggW1M0x8QU ++ mktemp + local LAST_ERR=/tmp/tmp.Fu26eMCJ35 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete secret some-name-ssl + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat 
/tmp/tmp.ggW1M0x8QU secret "some-name-ssl" deleted + cat /tmp/tmp.Fu26eMCJ35 + rm /tmp/tmp.ggW1M0x8QU /tmp/tmp.Fu26eMCJ35 + return 0 + sleep 30 + desc 'recreate PSMDB cluster some-name' + set +o xtrace ----------------------------------------------------------------------------------- recreate PSMDB cluster some-name ----------------------------------------------------------------------------------- + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/some-name.yml + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/conf/some-name.yml + yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod7.0"' ++ mktemp + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-1567-b27e0b5e"' + yq eval '(.spec | select(has("pmm"))).pmm.image = "perconalab/pmm-client:dev-latest"' + yq eval '.spec.upgradeOptions.apply="Never"' + local LAST_OUT=/tmp/tmp.5zZNACSAYv + yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' ++ mktemp + local LAST_ERR=/tmp/tmp.tY2uWzc3a7 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.5zZNACSAYv perconaservermongodb.psmdb.percona.com/some-name created + cat /tmp/tmp.tY2uWzc3a7 + rm /tmp/tmp.5zZNACSAYv /tmp/tmp.tY2uWzc3a7 + return 0 + desc 'check if all Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all Pods started ----------------------------------------------------------------------------------- + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready..............OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready...............OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.l9RJ2OaUXd +++ mktemp ++ local LAST_ERR=/tmp/tmp.L3dEqbt5oH ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.l9RJ2OaUXd ++ cat /tmp/tmp.L3dEqbt5oH ++ rm /tmp/tmp.l9RJ2OaUXd /tmp/tmp.L3dEqbt5oH ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready..............OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SsU7bWcEOu +++ mktemp ++ local LAST_ERR=/tmp/tmp.8kDkK50l0N ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in 
'$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.SsU7bWcEOu ++ cat /tmp/tmp.8kDkK50l0N ++ rm /tmp/tmp.SsU7bWcEOu /tmp/tmp.8kDkK50l0N ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness.............................. + wait_for_running some-name-cfg 3 false + local name=some-name-cfg + let last_pod=2 + local check_cluster_readyness=false + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=cfg + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-cfg-0 + local pod=some-name-cfg-0 + set +o xtrace waiting for pod/some-name-cfg-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-cfg-1 + local pod=some-name-cfg-1 + set +o xtrace waiting for pod/some-name-cfg-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5GUbeXOcnB +++ mktemp ++ local LAST_ERR=/tmp/tmp.YSin40umv8 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.5GUbeXOcnB ++ cat /tmp/tmp.YSin40umv8 ++ rm /tmp/tmp.5GUbeXOcnB /tmp/tmp.YSin40umv8 ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-cfg-2 + local pod=some-name-cfg-2 + set +o xtrace waiting for pod/some-name-cfg-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.N2H6IYaJlM +++ mktemp ++ local LAST_ERR=/tmp/tmp.KEZ2vQBSUr ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.N2H6IYaJlM ++ cat /tmp/tmp.KEZ2vQBSUr ++ rm /tmp/tmp.N2H6IYaJlM /tmp/tmp.KEZ2vQBSUr ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ false == \t\r\u\e ]] + wait_for_running some-name-mongos 3 + local name=some-name-mongos + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=mongos + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-mongos-0 + local pod=some-name-mongos-0 + set +o xtrace waiting for pod/some-name-mongos-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-mongos-1 + local pod=some-name-mongos-1 + set +o xtrace waiting for pod/some-name-mongos-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NwObl5YcNk +++ mktemp ++ local LAST_ERR=/tmp/tmp.6klSbYGivV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.NwObl5YcNk ++ cat /tmp/tmp.6klSbYGivV ++ rm /tmp/tmp.NwObl5YcNk /tmp/tmp.6klSbYGivV ++ return 0 + 
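
The wait_for_running blocks above print one dot per poll while each pod comes up, but the loop body itself is hidden behind set +o xtrace. A minimal stand-in that would produce the same "waiting for pod/... to be ready....OK" output could poll the pod's Ready condition as below; this is an assumption about the helper's internals, not its actual code.

    # Hypothetical stand-in for the hidden wait_pod loop: poll the Ready
    # condition and print a dot per attempt, as the trace output suggests.
    wait_pod() {
        local pod=$1
        echo -n "waiting for pod/${pod} to be ready"
        until [ "$(kubectl get pod "$pod" \
                -o 'jsonpath={.status.conditions[?(@.type=="Ready")].status}' 2>/dev/null)" = "True" ]; do
            echo -n .
            sleep 1
        done
        echo OK
    }

The surrounding jsonpath queries against the psmdb resource (arbiter.enabled, non_voting.enabled) then decide whether an extra arbiter or non-voting pod also has to be waited for before the cluster is considered ready.
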
[[ '' == \t\r\u\e ]] + wait_pod some-name-mongos-2 + local pod=some-name-mongos-2 + set +o xtrace waiting for pod/some-name-mongos-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.l37UO0oVHZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.7a9hBBrSE2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.l37UO0oVHZ ++ cat /tmp/tmp.7a9hBBrSE2 ++ rm /tmp/tmp.l37UO0oVHZ /tmp/tmp.7a9hBBrSE2 ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + compare_kubectl statefulset/some-name-rs0 + local resource=statefulset/some-name-rs0 + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-rs0.yml + local new_result=/tmp/tmp.jTdR9s6bFl/statefulset_some-name-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-rs0-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-rs0 + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-30987", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.V2vlOyWZnJ ++ mktemp + local LAST_ERR=/tmp/tmp.TXHP3cqHhE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/some-name-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.V2vlOyWZnJ + cat /tmp/tmp.TXHP3cqHhE + rm /tmp/tmp.V2vlOyWZnJ /tmp/tmp.TXHP3cqHhE + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.jTdR9s6bFl/statefulset_some-name-rs0.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.jTdR9s6bFl/statefulset_some-name-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.jTdR9s6bFl/statefulset_some-name-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-rs0.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-rs0.yml /tmp/tmp.jTdR9s6bFl/statefulset_some-name-rs0.yml + compare_kubectl statefulset/some-name-cfg + local resource=statefulset/some-name-cfg + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-cfg.yml + local new_result=/tmp/tmp.jTdR9s6bFl/statefulset_some-name-cfg.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-cfg-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-cfg + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. 
| select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-30987", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.9y0nMBz3NM ++ mktemp + local LAST_ERR=/tmp/tmp.ER12Tqy41H + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/some-name-cfg + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9y0nMBz3NM + cat /tmp/tmp.ER12Tqy41H + rm /tmp/tmp.9y0nMBz3NM /tmp/tmp.ER12Tqy41H + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.jTdR9s6bFl/statefulset_some-name-cfg.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.jTdR9s6bFl/statefulset_some-name-cfg.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.jTdR9s6bFl/statefulset_some-name-cfg.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-cfg.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-cfg.yml /tmp/tmp.jTdR9s6bFl/statefulset_some-name-cfg.yml + compare_kubectl statefulset/some-name-mongos + local resource=statefulset/some-name-mongos + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-mongos.yml + local new_result=/tmp/tmp.jTdR9s6bFl/statefulset_some-name-mongos.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-mongos-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-mongos + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. 
| select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-30987", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.KOCbKvxoWY ++ mktemp + local LAST_ERR=/tmp/tmp.joR4KItREi + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/some-name-mongos + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.KOCbKvxoWY + cat /tmp/tmp.joR4KItREi + rm /tmp/tmp.KOCbKvxoWY /tmp/tmp.joR4KItREi + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.jTdR9s6bFl/statefulset_some-name-mongos.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.jTdR9s6bFl/statefulset_some-name-mongos.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.jTdR9s6bFl/statefulset_some-name-mongos.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-mongos.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-mongos.yml /tmp/tmp.jTdR9s6bFl/statefulset_some-name-mongos.yml + desc 'check if certificates issued with certmanager' + set +o xtrace ----------------------------------------------------------------------------------- check if certificates issued with certmanager ----------------------------------------------------------------------------------- + check_tls_secret some-name-ssl + local secret_name=some-name-ssl + check_secret_data_key some-name-ssl ca.crt + local secret_name=some-name-ssl + local data_key=ca.crt + local secret_data ++ kubectl_bin get secrets/some-name-ssl -o json ++ jq '.data["ca.crt"]' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Dw3yhQ3Zpf +++ mktemp ++ local LAST_ERR=/tmp/tmp.bOfLTaWYW3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Dw3yhQ3Zpf ++ cat /tmp/tmp.bOfLTaWYW3 ++ rm /tmp/tmp.Dw3yhQ3Zpf /tmp/tmp.bOfLTaWYW3 ++ return 0 + 
secret_data='"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUMrakNDQWVLZ0F3SUJBZ0lRYmFyNm9qbUN5ZlhFelFnUmFFU0MxVEFOQmdrcWhraUc5dzBCQVFzRkFEQVgKTVJVd0V3WURWUVFERXd4emIyMWxMVzVoYldVdFkyRXdIaGNOTWpRd056QTVNRGd3TVRVeFdoY05NalV3TnpBNQpNRGd3TVRVeFdqQVhNUlV3RXdZRFZRUURFd3h6YjIxbExXNWhiV1V0WTJFd2dnRWlNQTBHQ1NxR1NJYjNEUUVCCkFRVUFBNElCRHdBd2dnRUtBb0lCQVFDd0VvckN6ZXV6VEJ2Zk5IV1VuZ0RSenJ4U1dTOUxTclZxKzcyYU1EekkKRVkwM3pQVEV1TGNGOVdxbVE2d3crTWsxYUUySUpBUkhuOVFYSnpRKzZzeSs4RWRLc3pxQW5sYmJaYXF1QTNHNQpoTjlOUHdGdWpKdHo2eDc3Q1liSWw0ZTl5U3lIRkd6Z2pVTGFoQW90c1dwdHFlM1QxYkgwZ2ZOWnRXRGlDNnVOCkVPT013RStLWmxidWpDQmtoK01KWi9jbXFJSndhVHJSSG9xRnArSVpnbTg4aExuSWFGU1oyRVFJQ0lnem96OUkKUCs4Zjh6QWZNZDhsSjRZbUxIMXNxV1BHbU5iY3ZJUmU5K0NXdzM4SXdSK3dtbWwxbk1wdXhMTlFHZXgvZDZ3aApRNVk0RzRkdFFIajlvS24xVGRQcFZHa2prV3QveDJQeGl3dW1iV0xLdVJPQkFnTUJBQUdqUWpCQU1BNEdBMVVkCkR3RUIvd1FFQXdJQ3BEQVBCZ05WSFJNQkFmOEVCVEFEQVFIL01CMEdBMVVkRGdRV0JCUWdPL2U5Z3N4MHl6K0oKRTNqSGsxbXNDTWw5ZVRBTkJna3Foa2lHOXcwQkFRc0ZBQU9DQVFFQUVZLzM0NEM2UmtpNGRXMnI2TEpSN2M4MApJZFhmQ0IwcWVLMVdQNW5QTUxtYWxOK2xlVzlMMlhlWi9FVC95Um4vNXBLRC96OG1GSEpTSDVWM3Y2NFVDK3d5Ckg0bWZFQW8vZVBIN05KSHVHT24za0pyazlHZnBtSW1ETjR3UXBSVXFvQ3I3NHVFaWRyWXY2MlM0dkJYWlhCSkQKU25HVVRHVU1BdGwwMmttekh5eDFKTElEeWtuS0ROZWZwVUV3UnA1Z1lGSE1SMzdPcS9UcDdTRmV1N3Zpa0lPWQpNK05zWjZJRndwS3pYU08wdHRFL09IRm4wamVXb2txcUZDTzR5MWlwd3dXaG9ZRm02ODZGRE0xUkpLT1phZlovClpBTUlEaWFncnRkQlNqS05qUDNtQnpDd3p3M2hGaHowWG0rMWhVZmltem84d3FiRzNubjAzYis3cjdJUnJRPT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo="' + '[' -z '"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUMrakNDQWVLZ0F3SUJBZ0lRYmFyNm9qbUN5ZlhFelFnUmFFU0MxVEFOQmdrcWhraUc5dzBCQVFzRkFEQVgKTVJVd0V3WURWUVFERXd4emIyMWxMVzVoYldVdFkyRXdIaGNOTWpRd056QTVNRGd3TVRVeFdoY05NalV3TnpBNQpNRGd3TVRVeFdqQVhNUlV3RXdZRFZRUURFd3h6YjIxbExXNWhiV1V0WTJFd2dnRWlNQTBHQ1NxR1NJYjNEUUVCCkFRVUFBNElCRHdBd2dnRUtBb0lCQVFDd0VvckN6ZXV6VEJ2Zk5IV1VuZ0RSenJ4U1dTOUxTclZxKzcyYU1EekkKRVkwM3pQVEV1TGNGOVdxbVE2d3crTWsxYUUySUpBUkhuOVFYSnpRKzZzeSs4RWRLc3pxQW5sYmJaYXF1QTNHNQpoTjlOUHdGdWpKdHo2eDc3Q1liSWw0ZTl5U3lIRkd6Z2pVTGFoQW90c1dwdHFlM1QxYkgwZ2ZOWnRXRGlDNnVOCkVPT013RStLWmxidWpDQmtoK01KWi9jbXFJSndhVHJSSG9xRnArSVpnbTg4aExuSWFGU1oyRVFJQ0lnem96OUkKUCs4Zjh6QWZNZDhsSjRZbUxIMXNxV1BHbU5iY3ZJUmU5K0NXdzM4SXdSK3dtbWwxbk1wdXhMTlFHZXgvZDZ3aApRNVk0RzRkdFFIajlvS24xVGRQcFZHa2prV3QveDJQeGl3dW1iV0xLdVJPQkFnTUJBQUdqUWpCQU1BNEdBMVVkCkR3RUIvd1FFQXdJQ3BEQVBCZ05WSFJNQkFmOEVCVEFEQVFIL01CMEdBMVVkRGdRV0JCUWdPL2U5Z3N4MHl6K0oKRTNqSGsxbXNDTWw5ZVRBTkJna3Foa2lHOXcwQkFRc0ZBQU9DQVFFQUVZLzM0NEM2UmtpNGRXMnI2TEpSN2M4MApJZFhmQ0IwcWVLMVdQNW5QTUxtYWxOK2xlVzlMMlhlWi9FVC95Um4vNXBLRC96OG1GSEpTSDVWM3Y2NFVDK3d5Ckg0bWZFQW8vZVBIN05KSHVHT24za0pyazlHZnBtSW1ETjR3UXBSVXFvQ3I3NHVFaWRyWXY2MlM0dkJYWlhCSkQKU25HVVRHVU1BdGwwMmttekh5eDFKTElEeWtuS0ROZWZwVUV3UnA1Z1lGSE1SMzdPcS9UcDdTRmV1N3Zpa0lPWQpNK05zWjZJRndwS3pYU08wdHRFL09IRm4wamVXb2txcUZDTzR5MWlwd3dXaG9ZRm02ODZGRE0xUkpLT1phZlovClpBTUlEaWFncnRkQlNqS05qUDNtQnpDd3p3M2hGaHowWG0rMWhVZmltem84d3FiRzNubjAzYis3cjdJUnJRPT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo="' ']' + check_secret_data_key some-name-ssl tls.crt + local secret_name=some-name-ssl + local data_key=tls.crt + local secret_data ++ kubectl_bin get secrets/some-name-ssl -o json ++ jq '.data["tls.crt"]' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kyBtXmwz03 +++ mktemp ++ local LAST_ERR=/tmp/tmp.uJULXVX6DM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.kyBtXmwz03 ++ cat /tmp/tmp.uJULXVX6DM ++ rm /tmp/tmp.kyBtXmwz03 /tmp/tmp.uJULXVX6DM ++ 
return 0 + secret_data='"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUg1ekNDQnMrZ0F3SUJBZ0lRRVlGTmtYY3JKYUVJckhJZExYNitxVEFOQmdrcWhraUc5dzBCQVFzRkFEQVgKTVJVd0V3WURWUVFERXd4emIyMWxMVzVoYldVdFkyRXdIaGNOTWpRd056QTVNRGd3TmpVeFdoY05NalF4TURBMwpNRGd3TmpVeFdqQWtNUTR3REFZRFZRUUtFd1ZRVTAxRVFqRVNNQkFHQTFVRUF4TUpjMjl0WlMxdVlXMWxNSUlCCklqQU5CZ2txaGtpRzl3MEJBUUVGQUFPQ0FROEFNSUlCQ2dLQ0FRRUF2QTBkem1tL0tJWFBUZzBjb1BkK1AycFIKQi9Wd3NqSW5mVGNQYlpIWkowL21uSkFYYXpHTWY1emlQaG9ocm0xMlhwQUVYa3MxTERQMFhVMDZlblBicHNFRQpYQnZqbllzNG5DNDNTV0g0dHpRTGlmSGJDMnlLQklQUWp5SEM0STRCcGdmQmdJejZWenNYeXdzS1VMTGZGQmpSCnZoSG5KNktFNTN2aVUvMm85TkVSOW1uYnZzRDZUKzg3RDllV0FCdXZhSXFvbUdONkdyWTNITW1ER05aVVVOQTkKSU80bUF2WjFHRFdtVW5jN3h0S0V0TUpScVhyVktGN3kxU0c3K2toWmc0ckhvMWIvMW9wN3laQVdnRDBnaWttWApUakJ1ZEJWWlhnM3B0emJnbFhpTXpPZkJVOHNIdXRYOE4yQTNsdFRTdXJmbjBPYVRHQVdCZU42S2lZZVhBd0lECkFRQUJvNElGSURDQ0JSd3dEZ1lEVlIwUEFRSC9CQVFEQWdXZ01Bd0dBMVVkRXdFQi93UUNNQUF3SHdZRFZSMGoKQkJnd0ZvQVVJRHYzdllMTWRNcy9pUk40eDVOWnJBakpmWGt3Z2dUWkJnTlZIUkVFZ2dUUU1JSUV6SUlKYkc5agpZV3hvYjNOMGdnMXpiMjFsTFc1aGJXVXRjbk13Z2lwemIyMWxMVzVoYldVdGNuTXdMblJzY3kxcGMzTjFaUzFqClpYSjBMVzFoYm1GblpYSXRNekE1T0RlQ1BITnZiV1V0Ym1GdFpTMXljekF1ZEd4ekxXbHpjM1ZsTFdObGNuUXQKYldGdVlXZGxjaTB6TURrNE55NXpkbU11WTJ4MWMzUmxjaTVzYjJOaGJJSVBLaTV6YjIxbExXNWhiV1V0Y25NdwpnaXdxTG5OdmJXVXRibUZ0WlMxeWN6QXVkR3h6TFdsemMzVmxMV05sY25RdGJXRnVZV2RsY2kwek1EazRONEkrCktpNXpiMjFsTFc1aGJXVXRjbk13TG5Sc2N5MXBjM04xWlMxalpYSjBMVzFoYm1GblpYSXRNekE1T0RjdWMzWmoKTG1Oc2RYTjBaWEl1Ykc5allXeUNQM052YldVdGJtRnRaUzF5Y3pBdWRHeHpMV2x6YzNWbExXTmxjblF0YldGdQpZV2RsY2kwek1EazROeTV6ZG1NdVkyeDFjM1JsY25ObGRDNXNiMk5oYklKQktpNXpiMjFsTFc1aGJXVXRjbk13CkxuUnNjeTFwYzNOMVpTMWpaWEowTFcxaGJtRm5aWEl0TXpBNU9EY3VjM1pqTG1Oc2RYTjBaWEp6WlhRdWJHOWoKWVd5Q015b3VkR3h6TFdsemMzVmxMV05sY25RdGJXRnVZV2RsY2kwek1EazROeTV6ZG1NdVkyeDFjM1JsY25ObApkQzVzYjJOaGJJSVFjMjl0WlMxdVlXMWxMVzF2Ym1kdmM0SXRjMjl0WlMxdVlXMWxMVzF2Ym1kdmN5NTBiSE10CmFYTnpkV1V0WTJWeWRDMXRZVzVoWjJWeUxUTXdPVGczZ2o5emIyMWxMVzVoYldVdGJXOXVaMjl6TG5Sc2N5MXAKYzNOMVpTMWpaWEowTFcxaGJtRm5aWEl0TXpBNU9EY3VjM1pqTG1Oc2RYTjBaWEl1Ykc5allXeUNFaW91YzI5dApaUzF1WVcxbExXMXZibWR2YzRJdktpNXpiMjFsTFc1aGJXVXRiVzl1WjI5ekxuUnNjeTFwYzNOMVpTMWpaWEowCkxXMWhibUZuWlhJdE16QTVPRGVDUVNvdWMyOXRaUzF1WVcxbExXMXZibWR2Y3k1MGJITXRhWE56ZFdVdFkyVnkKZEMxdFlXNWhaMlZ5TFRNd09UZzNMbk4yWXk1amJIVnpkR1Z5TG14dlkyRnNnZzF6YjIxbExXNWhiV1V0WTJabgpnaXB6YjIxbExXNWhiV1V0WTJabkxuUnNjeTFwYzNOMVpTMWpaWEowTFcxaGJtRm5aWEl0TXpBNU9EZUNQSE52CmJXVXRibUZ0WlMxalptY3VkR3h6TFdsemMzVmxMV05sY25RdGJXRnVZV2RsY2kwek1EazROeTV6ZG1NdVkyeDEKYzNSbGNpNXNiMk5oYklJUEtpNXpiMjFsTFc1aGJXVXRZMlpuZ2l3cUxuTnZiV1V0Ym1GdFpTMWpabWN1ZEd4egpMV2x6YzNWbExXTmxjblF0YldGdVlXZGxjaTB6TURrNE40SStLaTV6YjIxbExXNWhiV1V0WTJabkxuUnNjeTFwCmMzTjFaUzFqWlhKMExXMWhibUZuWlhJdE16QTVPRGN1YzNaakxtTnNkWE4wWlhJdWJHOWpZV3lDUW5OdmJXVXQKYm1GdFpTMXRiMjVuYjNNdWRHeHpMV2x6YzNWbExXTmxjblF0YldGdVlXZGxjaTB6TURrNE55NXpkbU11WTJ4MQpjM1JsY25ObGRDNXNiMk5oYklKRUtpNXpiMjFsTFc1aGJXVXRiVzl1WjI5ekxuUnNjeTFwYzNOMVpTMWpaWEowCkxXMWhibUZuWlhJdE16QTVPRGN1YzNaakxtTnNkWE4wWlhKelpYUXViRzlqWVd5Q1AzTnZiV1V0Ym1GdFpTMWoKWm1jdWRHeHpMV2x6YzNWbExXTmxjblF0YldGdVlXZGxjaTB6TURrNE55NXpkbU11WTJ4MWMzUmxjbk5sZEM1cwpiMk5oYklKQktpNXpiMjFsTFc1aGJXVXRZMlpuTG5Sc2N5MXBjM04xWlMxalpYSjBMVzFoYm1GblpYSXRNekE1Ck9EY3VjM1pqTG1Oc2RYTjBaWEp6WlhRdWJHOWpZV3d3RFFZSktvWklodmNOQVFFTEJRQURnZ0VCQUFjTzBUNEwKaWRTekpFcEtNcm1RbmM2ci9MSDNpTWhQOXdnNm5HaTFmWXAzcGk1S2EyUFJ4OWdGOGNTa0ZmZDJDa2xZQmZFMgpFVG5DYTRPTlRvNEhDNFdjV0xnRGxPd3J5eGo5RUswNFVqYlVLNEppQ2h4TXhpZjFleUI4MC9WcWJ4cmFlK2gxCnppTFNLRmdkRlZIWGoyNDlUWTVldTFYblM5N0M0VGlwOSt3WjZ2ZGV6MGVvaFFUQTZnVVBKdlpRenR3Z2lTdEcKQ1JwcFA2UHJMaEI3dGltOVR0ek
RNRWVkQTg3b0cvZEZTOCt5aHh5SDVZRFhjOTZRTFdOVENreFFHNVJlSjZIYwppQXVNS2ZKVld2ZGRnVWFabFJONmVmVmlWNXlROFNVTGEvTE9jM3ZNOTM2dVFKSzdiMjZQb3VhY09Fcm1xbHkvCkVlTjlQUTF0dEZVYU83dz0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo="' + '[' -z '"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUg1ekNDQnMrZ0F3SUJBZ0lRRVlGTmtYY3JKYUVJckhJZExYNitxVEFOQmdrcWhraUc5dzBCQVFzRkFEQVgKTVJVd0V3WURWUVFERXd4emIyMWxMVzVoYldVdFkyRXdIaGNOTWpRd056QTVNRGd3TmpVeFdoY05NalF4TURBMwpNRGd3TmpVeFdqQWtNUTR3REFZRFZRUUtFd1ZRVTAxRVFqRVNNQkFHQTFVRUF4TUpjMjl0WlMxdVlXMWxNSUlCCklqQU5CZ2txaGtpRzl3MEJBUUVGQUFPQ0FROEFNSUlCQ2dLQ0FRRUF2QTBkem1tL0tJWFBUZzBjb1BkK1AycFIKQi9Wd3NqSW5mVGNQYlpIWkowL21uSkFYYXpHTWY1emlQaG9ocm0xMlhwQUVYa3MxTERQMFhVMDZlblBicHNFRQpYQnZqbllzNG5DNDNTV0g0dHpRTGlmSGJDMnlLQklQUWp5SEM0STRCcGdmQmdJejZWenNYeXdzS1VMTGZGQmpSCnZoSG5KNktFNTN2aVUvMm85TkVSOW1uYnZzRDZUKzg3RDllV0FCdXZhSXFvbUdONkdyWTNITW1ER05aVVVOQTkKSU80bUF2WjFHRFdtVW5jN3h0S0V0TUpScVhyVktGN3kxU0c3K2toWmc0ckhvMWIvMW9wN3laQVdnRDBnaWttWApUakJ1ZEJWWlhnM3B0emJnbFhpTXpPZkJVOHNIdXRYOE4yQTNsdFRTdXJmbjBPYVRHQVdCZU42S2lZZVhBd0lECkFRQUJvNElGSURDQ0JSd3dEZ1lEVlIwUEFRSC9CQVFEQWdXZ01Bd0dBMVVkRXdFQi93UUNNQUF3SHdZRFZSMGoKQkJnd0ZvQVVJRHYzdllMTWRNcy9pUk40eDVOWnJBakpmWGt3Z2dUWkJnTlZIUkVFZ2dUUU1JSUV6SUlKYkc5agpZV3hvYjNOMGdnMXpiMjFsTFc1aGJXVXRjbk13Z2lwemIyMWxMVzVoYldVdGNuTXdMblJzY3kxcGMzTjFaUzFqClpYSjBMVzFoYm1GblpYSXRNekE1T0RlQ1BITnZiV1V0Ym1GdFpTMXljekF1ZEd4ekxXbHpjM1ZsTFdObGNuUXQKYldGdVlXZGxjaTB6TURrNE55NXpkbU11WTJ4MWMzUmxjaTVzYjJOaGJJSVBLaTV6YjIxbExXNWhiV1V0Y25NdwpnaXdxTG5OdmJXVXRibUZ0WlMxeWN6QXVkR3h6TFdsemMzVmxMV05sY25RdGJXRnVZV2RsY2kwek1EazRONEkrCktpNXpiMjFsTFc1aGJXVXRjbk13TG5Sc2N5MXBjM04xWlMxalpYSjBMVzFoYm1GblpYSXRNekE1T0RjdWMzWmoKTG1Oc2RYTjBaWEl1Ykc5allXeUNQM052YldVdGJtRnRaUzF5Y3pBdWRHeHpMV2x6YzNWbExXTmxjblF0YldGdQpZV2RsY2kwek1EazROeTV6ZG1NdVkyeDFjM1JsY25ObGRDNXNiMk5oYklKQktpNXpiMjFsTFc1aGJXVXRjbk13CkxuUnNjeTFwYzNOMVpTMWpaWEowTFcxaGJtRm5aWEl0TXpBNU9EY3VjM1pqTG1Oc2RYTjBaWEp6WlhRdWJHOWoKWVd5Q015b3VkR3h6TFdsemMzVmxMV05sY25RdGJXRnVZV2RsY2kwek1EazROeTV6ZG1NdVkyeDFjM1JsY25ObApkQzVzYjJOaGJJSVFjMjl0WlMxdVlXMWxMVzF2Ym1kdmM0SXRjMjl0WlMxdVlXMWxMVzF2Ym1kdmN5NTBiSE10CmFYTnpkV1V0WTJWeWRDMXRZVzVoWjJWeUxUTXdPVGczZ2o5emIyMWxMVzVoYldVdGJXOXVaMjl6TG5Sc2N5MXAKYzNOMVpTMWpaWEowTFcxaGJtRm5aWEl0TXpBNU9EY3VjM1pqTG1Oc2RYTjBaWEl1Ykc5allXeUNFaW91YzI5dApaUzF1WVcxbExXMXZibWR2YzRJdktpNXpiMjFsTFc1aGJXVXRiVzl1WjI5ekxuUnNjeTFwYzNOMVpTMWpaWEowCkxXMWhibUZuWlhJdE16QTVPRGVDUVNvdWMyOXRaUzF1WVcxbExXMXZibWR2Y3k1MGJITXRhWE56ZFdVdFkyVnkKZEMxdFlXNWhaMlZ5TFRNd09UZzNMbk4yWXk1amJIVnpkR1Z5TG14dlkyRnNnZzF6YjIxbExXNWhiV1V0WTJabgpnaXB6YjIxbExXNWhiV1V0WTJabkxuUnNjeTFwYzNOMVpTMWpaWEowTFcxaGJtRm5aWEl0TXpBNU9EZUNQSE52CmJXVXRibUZ0WlMxalptY3VkR3h6TFdsemMzVmxMV05sY25RdGJXRnVZV2RsY2kwek1EazROeTV6ZG1NdVkyeDEKYzNSbGNpNXNiMk5oYklJUEtpNXpiMjFsTFc1aGJXVXRZMlpuZ2l3cUxuTnZiV1V0Ym1GdFpTMWpabWN1ZEd4egpMV2x6YzNWbExXTmxjblF0YldGdVlXZGxjaTB6TURrNE40SStLaTV6YjIxbExXNWhiV1V0WTJabkxuUnNjeTFwCmMzTjFaUzFqWlhKMExXMWhibUZuWlhJdE16QTVPRGN1YzNaakxtTnNkWE4wWlhJdWJHOWpZV3lDUW5OdmJXVXQKYm1GdFpTMXRiMjVuYjNNdWRHeHpMV2x6YzNWbExXTmxjblF0YldGdVlXZGxjaTB6TURrNE55NXpkbU11WTJ4MQpjM1JsY25ObGRDNXNiMk5oYklKRUtpNXpiMjFsTFc1aGJXVXRiVzl1WjI5ekxuUnNjeTFwYzNOMVpTMWpaWEowCkxXMWhibUZuWlhJdE16QTVPRGN1YzNaakxtTnNkWE4wWlhKelpYUXViRzlqWVd5Q1AzTnZiV1V0Ym1GdFpTMWoKWm1jdWRHeHpMV2x6YzNWbExXTmxjblF0YldGdVlXZGxjaTB6TURrNE55NXpkbU11WTJ4MWMzUmxjbk5sZEM1cwpiMk5oYklKQktpNXpiMjFsTFc1aGJXVXRZMlpuTG5Sc2N5MXBjM04xWlMxalpYSjBMVzFoYm1GblpYSXRNekE1Ck9EY3VjM1pqTG1Oc2RYTjBaWEp6WlhRdWJHOWpZV3d3RFFZSktvWklodmNOQVFFTEJRQURnZ0VCQUFjTzBUNEwKaWRTekpFcEtNcm1RbmM2ci9MSDNpTWhQOXdnNm5HaTFmWXAzcGk1S2EyUFJ4OWdGOGNTa0ZmZDJDa2xZQmZFMgpFVG5
DYTRPTlRvNEhDNFdjV0xnRGxPd3J5eGo5RUswNFVqYlVLNEppQ2h4TXhpZjFleUI4MC9WcWJ4cmFlK2gxCnppTFNLRmdkRlZIWGoyNDlUWTVldTFYblM5N0M0VGlwOSt3WjZ2ZGV6MGVvaFFUQTZnVVBKdlpRenR3Z2lTdEcKQ1JwcFA2UHJMaEI3dGltOVR0ekRNRWVkQTg3b0cvZEZTOCt5aHh5SDVZRFhjOTZRTFdOVENreFFHNVJlSjZIYwppQXVNS2ZKVld2ZGRnVWFabFJONmVmVmlWNXlROFNVTGEvTE9jM3ZNOTM2dVFKSzdiMjZQb3VhY09Fcm1xbHkvCkVlTjlQUTF0dEZVYU83dz0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo="' ']' + check_secret_data_key some-name-ssl tls.key + local secret_name=some-name-ssl + local data_key=tls.key + local secret_data ++ kubectl_bin get secrets/some-name-ssl -o json ++ jq '.data["tls.key"]' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LI4IBBasUn +++ mktemp ++ local LAST_ERR=/tmp/tmp.l3dCv69OaB ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LI4IBBasUn ++ cat /tmp/tmp.l3dCv69OaB ++ rm /tmp/tmp.LI4IBBasUn /tmp/tmp.l3dCv69OaB ++ return 0 + secret_data='"LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFb3dJQkFBS0NBUUVBdkEwZHptbS9LSVhQVGcwY29QZCtQMnBSQi9Wd3NqSW5mVGNQYlpIWkowL21uSkFYCmF6R01mNXppUGhvaHJtMTJYcEFFWGtzMUxEUDBYVTA2ZW5QYnBzRUVYQnZqbllzNG5DNDNTV0g0dHpRTGlmSGIKQzJ5S0JJUFFqeUhDNEk0QnBnZkJnSXo2VnpzWHl3c0tVTExmRkJqUnZoSG5KNktFNTN2aVUvMm85TkVSOW1uYgp2c0Q2VCs4N0Q5ZVdBQnV2YUlxb21HTjZHclkzSE1tREdOWlVVTkE5SU80bUF2WjFHRFdtVW5jN3h0S0V0TUpSCnFYclZLRjd5MVNHNytraFpnNHJIbzFiLzFvcDd5WkFXZ0QwZ2lrbVhUakJ1ZEJWWlhnM3B0emJnbFhpTXpPZkIKVThzSHV0WDhOMkEzbHRUU3VyZm4wT2FUR0FXQmVONktpWWVYQXdJREFRQUJBb0lCQUJhZEdTMStSKytPajh4YQpCVXhTRnN0M2RJRVpnZERiK1NXMG5sVlZ5YUJSbW5EUFh4R1Q1c3BkeWxNZmYxZGpjRzJVc2dISTEzOFdURmQvCjl2MU11MmhGSThvVHpINWdoSkU3ZVlpSWtJa3ZhQWZSbnI2ejhiRHpUUHc1V2g3NDFMZnJqTmh4bTVVcjFRT2sKc3p2YkZycHE5YVgrcnlaMEE3bDJpMVFBVi9ZQWpTQzFINHhEcG8rV05uTE1zdWI2aHJrMnlnUUR1ZDBDMEwyMAoxQS9qb1F4WjRmZS9zVFV5WWVQTllKQXRtRC9LaWxROWRyd2hZOFlIQWJqOG0vS25KOHJSTjM4NE5nWE5wdytsCnk2cGQvOFV5NUhacm5XRC9NL2l1VGpKVkVkWmhIV3VVTVBZcnZCbjdHaVEyWUlmVWlrMjREdTBsNFo2My9rN28KR2FIM2ZQRUNnWUVBdzFBOEk5aitIZEpZYWM1b29vSlI3aXd5emU2VHBGdGJKei85Z2Q4cmYwRVIzbmxWQjB4eQo4b2wvY1NCYjBvTFBETk5iUm5CZHZBUFFFK3RBSWFac1dDaFhiaDNJSEthZ0VDaXRrZFZzZlJ4TnFKbEJ5SzU1CitEVTM3N09xOC9WMFhOMWJXa0t2K3dYQlo5NHdOWVh4QjFsWldXeVg4RUJSUjBYdVdtM01MSHNDZ1lFQTluczYKUi9LbWlxaGdlTTJ3NXRWWlRzWDlnczE4bmZNdFJNc0s1bGFlcjRMMnQ3eE5DN2JCSUxaTTBnbjhOcVR6UVZrcwp2TUhFTWVHbVhUMnRWdXFjU3BNQm8rVGZrVnlPNHgvVmN5ZE5MS29uWit5Q0tRUS9oS0tLbGsxZlNYS0ttL1BJCnJJRU5kbnRaNVFiQ3hvUit4SFA0Y01yK0pubzYzb2pnSDZ0dkRSa0NnWUFCdjljNkR2aC90ZUdhMlFGdzZ5L20KclRpclJZYkhBeFoyN0lqTnYvejR2OUYwSjZ3T1M2My81Q0ZqRndJYmlaWm5MQUYyU1ExZDRMN2RYczhOUGh0YwpVc2d0c1ZDWWlSbUg2NFZhZUFubnVQZ3RHV3BRYk5lazFIMHl4VzR6anpCQ29LenZ5d2NlZlhlMUFYVjNGVU1LCmFNOTNFSlR2enZPcmFzSHV4MnI3dndLQmdCT3ZUNzVEUU0yK0dNWk5GeDI0V0VPVVNHVFQ2R25zNithY21LaEEKcnJleHErYU1vVktZSytCaTdzdXZoSGNRd1dTc3JJZitxWjZhT09FbGRkeUZMQ3BBaDFjQmhtRmpsUDNLb3c5cApTaDZSRUVpYWVPS3h0aWp2bWtYYXRBdDNuRjVNSC9DMjljNUtsNmxVMnVWajI0UHhCeHhxK2RVMDJkSy85N1pQCnlMU3hBb0dCQUs4YWJlNytoejh0dkZmTW1UeGhzM0lFdW1TalJWRXRZbmhURnM4dlFsUllpUnVSMFp3QnNxYU4KRUZBRVRTUHlUMml3dnZ5alM0ZHA4S0VYRC9CL1NHQ0lhMnJ1VElEL256QkU4TGNxN09SY0MwcmZHTTI5MlN2aQpqZEF1c284dXlEelpQcm9rcmlwVWpteE5NaXpnWldkclhtWk1JNXFiWHArQVg2L1ZJd0pmCi0tLS0tRU5EIFJTQSBQUklWQVRFIEtFWS0tLS0tCg=="' + '[' -z 
'"LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFb3dJQkFBS0NBUUVBdkEwZHptbS9LSVhQVGcwY29QZCtQMnBSQi9Wd3NqSW5mVGNQYlpIWkowL21uSkFYCmF6R01mNXppUGhvaHJtMTJYcEFFWGtzMUxEUDBYVTA2ZW5QYnBzRUVYQnZqbllzNG5DNDNTV0g0dHpRTGlmSGIKQzJ5S0JJUFFqeUhDNEk0QnBnZkJnSXo2VnpzWHl3c0tVTExmRkJqUnZoSG5KNktFNTN2aVUvMm85TkVSOW1uYgp2c0Q2VCs4N0Q5ZVdBQnV2YUlxb21HTjZHclkzSE1tREdOWlVVTkE5SU80bUF2WjFHRFdtVW5jN3h0S0V0TUpSCnFYclZLRjd5MVNHNytraFpnNHJIbzFiLzFvcDd5WkFXZ0QwZ2lrbVhUakJ1ZEJWWlhnM3B0emJnbFhpTXpPZkIKVThzSHV0WDhOMkEzbHRUU3VyZm4wT2FUR0FXQmVONktpWWVYQXdJREFRQUJBb0lCQUJhZEdTMStSKytPajh4YQpCVXhTRnN0M2RJRVpnZERiK1NXMG5sVlZ5YUJSbW5EUFh4R1Q1c3BkeWxNZmYxZGpjRzJVc2dISTEzOFdURmQvCjl2MU11MmhGSThvVHpINWdoSkU3ZVlpSWtJa3ZhQWZSbnI2ejhiRHpUUHc1V2g3NDFMZnJqTmh4bTVVcjFRT2sKc3p2YkZycHE5YVgrcnlaMEE3bDJpMVFBVi9ZQWpTQzFINHhEcG8rV05uTE1zdWI2aHJrMnlnUUR1ZDBDMEwyMAoxQS9qb1F4WjRmZS9zVFV5WWVQTllKQXRtRC9LaWxROWRyd2hZOFlIQWJqOG0vS25KOHJSTjM4NE5nWE5wdytsCnk2cGQvOFV5NUhacm5XRC9NL2l1VGpKVkVkWmhIV3VVTVBZcnZCbjdHaVEyWUlmVWlrMjREdTBsNFo2My9rN28KR2FIM2ZQRUNnWUVBdzFBOEk5aitIZEpZYWM1b29vSlI3aXd5emU2VHBGdGJKei85Z2Q4cmYwRVIzbmxWQjB4eQo4b2wvY1NCYjBvTFBETk5iUm5CZHZBUFFFK3RBSWFac1dDaFhiaDNJSEthZ0VDaXRrZFZzZlJ4TnFKbEJ5SzU1CitEVTM3N09xOC9WMFhOMWJXa0t2K3dYQlo5NHdOWVh4QjFsWldXeVg4RUJSUjBYdVdtM01MSHNDZ1lFQTluczYKUi9LbWlxaGdlTTJ3NXRWWlRzWDlnczE4bmZNdFJNc0s1bGFlcjRMMnQ3eE5DN2JCSUxaTTBnbjhOcVR6UVZrcwp2TUhFTWVHbVhUMnRWdXFjU3BNQm8rVGZrVnlPNHgvVmN5ZE5MS29uWit5Q0tRUS9oS0tLbGsxZlNYS0ttL1BJCnJJRU5kbnRaNVFiQ3hvUit4SFA0Y01yK0pubzYzb2pnSDZ0dkRSa0NnWUFCdjljNkR2aC90ZUdhMlFGdzZ5L20KclRpclJZYkhBeFoyN0lqTnYvejR2OUYwSjZ3T1M2My81Q0ZqRndJYmlaWm5MQUYyU1ExZDRMN2RYczhOUGh0YwpVc2d0c1ZDWWlSbUg2NFZhZUFubnVQZ3RHV3BRYk5lazFIMHl4VzR6anpCQ29LenZ5d2NlZlhlMUFYVjNGVU1LCmFNOTNFSlR2enZPcmFzSHV4MnI3dndLQmdCT3ZUNzVEUU0yK0dNWk5GeDI0V0VPVVNHVFQ2R25zNithY21LaEEKcnJleHErYU1vVktZSytCaTdzdXZoSGNRd1dTc3JJZitxWjZhT09FbGRkeUZMQ3BBaDFjQmhtRmpsUDNLb3c5cApTaDZSRUVpYWVPS3h0aWp2bWtYYXRBdDNuRjVNSC9DMjljNUtsNmxVMnVWajI0UHhCeHhxK2RVMDJkSy85N1pQCnlMU3hBb0dCQUs4YWJlNytoejh0dkZmTW1UeGhzM0lFdW1TalJWRXRZbmhURnM4dlFsUllpUnVSMFp3QnNxYU4KRUZBRVRTUHlUMml3dnZ5alM0ZHA4S0VYRC9CL1NHQ0lhMnJ1VElEL256QkU4TGNxN09SY0MwcmZHTTI5MlN2aQpqZEF1c284dXlEelpQcm9rcmlwVWpteE5NaXpnWldkclhtWk1JNXFiWHArQVg2L1ZJd0pmCi0tLS0tRU5EIFJTQSBQUklWQVRFIEtFWS0tLS0tCg=="' ']' + desc 'check if CA issuer created' + set +o xtrace ----------------------------------------------------------------------------------- check if CA issuer created ----------------------------------------------------------------------------------- + compare_kubectl issuer/some-name-psmdb-ca-issuer + local resource=issuer/some-name-psmdb-ca-issuer + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-ca-issuer.yml + local new_result=/tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-ca-issuer.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-ca-issuer-oc.yml ']' + kubectl_bin get -o yaml issuer/some-name-psmdb-ca-issuer + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. 
| select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-30987", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.4fF77I4TaP ++ mktemp + local LAST_ERR=/tmp/tmp.hqsvL7jCcY + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml issuer/some-name-psmdb-ca-issuer + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.4fF77I4TaP + cat /tmp/tmp.hqsvL7jCcY + rm /tmp/tmp.4fF77I4TaP /tmp/tmp.hqsvL7jCcY + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-ca-issuer.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-ca-issuer.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-ca-issuer.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-ca-issuer.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-ca-issuer.yml /tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-ca-issuer.yml + desc 'check if issuer created' + set +o xtrace ----------------------------------------------------------------------------------- check if issuer created ----------------------------------------------------------------------------------- + compare_kubectl issuer/some-name-psmdb-issuer + local resource=issuer/some-name-psmdb-issuer + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-issuer.yml + local new_result=/tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-issuer.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-issuer-oc.yml ']' + kubectl_bin get -o yaml issuer/some-name-psmdb-issuer + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. 
| select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-30987", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.lX5atqwTBW ++ mktemp + local LAST_ERR=/tmp/tmp.IiMTdabYes + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml issuer/some-name-psmdb-issuer + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.lX5atqwTBW + cat /tmp/tmp.IiMTdabYes + rm /tmp/tmp.lX5atqwTBW /tmp/tmp.IiMTdabYes + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-issuer.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-issuer.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-issuer.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-issuer.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-issuer.yml /tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-issuer.yml + desc 'check if certificate issued' + set +o xtrace ----------------------------------------------------------------------------------- check if certificate issued ----------------------------------------------------------------------------------- + compare_kubectl certificate/some-name-ssl + local resource=certificate/some-name-ssl + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl.yml + local new_result=/tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-oc.yml ']' + kubectl_bin get -o yaml certificate/some-name-ssl + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. 
| select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-30987", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.Isxv75kMuU ++ mktemp + local LAST_ERR=/tmp/tmp.fU6JNeVpnq + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml certificate/some-name-ssl + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Isxv75kMuU + cat /tmp/tmp.fU6JNeVpnq + rm /tmp/tmp.Isxv75kMuU /tmp/tmp.fU6JNeVpnq + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl.yml /tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl.yml + desc 'check if internal certificate issued' + set +o xtrace ----------------------------------------------------------------------------------- check if internal certificate issued ----------------------------------------------------------------------------------- + compare_kubectl certificate/some-name-ssl-internal + local resource=certificate/some-name-ssl-internal + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-internal.yml + local new_result=/tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl-internal.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-internal-oc.yml ']' + kubectl_bin get -o yaml certificate/some-name-ssl-internal ++ mktemp + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. 
| select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-30987", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - + local LAST_OUT=/tmp/tmp.qOeFpxM3xe ++ mktemp + local LAST_ERR=/tmp/tmp.o9LWpZdZVQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml certificate/some-name-ssl-internal + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.qOeFpxM3xe + cat /tmp/tmp.o9LWpZdZVQ + rm /tmp/tmp.qOeFpxM3xe /tmp/tmp.o9LWpZdZVQ + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl-internal.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl-internal.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl-internal.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-internal.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-internal.yml /tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl-internal.yml + renew_certificate some-name-ssl + certificate=some-name-ssl + wait_certificate some-name-ssl + certificate=some-name-ssl + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl --timeout=60s certificate.cert-manager.io/some-name-ssl condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl --timeout=60s certificate.cert-manager.io/some-name-ssl condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl --timeout=60s certificate.cert-manager.io/some-name-ssl condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl --timeout=60s certificate.cert-manager.io/some-name-ssl condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl --timeout=60s certificate.cert-manager.io/some-name-ssl condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl --timeout=60s certificate.cert-manager.io/some-name-ssl condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl --timeout=60s certificate.cert-manager.io/some-name-ssl condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl --timeout=60s certificate.cert-manager.io/some-name-ssl condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl --timeout=60s certificate.cert-manager.io/some-name-ssl condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl --timeout=60s certificate.cert-manager.io/some-name-ssl condition met + sleep 1 + desc 'renew some-name-ssl' + set +o xtrace ----------------------------------------------------------------------------------- renew some-name-ssl ----------------------------------------------------------------------------------- + local pod_name ++ kubectl_bin get pods --selector=name=cmctl -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vVq5l2tYfQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.C5K099bjik ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=cmctl -o 
'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.vVq5l2tYfQ ++ cat /tmp/tmp.C5K099bjik ++ rm /tmp/tmp.vVq5l2tYfQ /tmp/tmp.C5K099bjik ++ return 0 + pod_name=cmctl-69659bcd68-6vrn6 + local revision ++ kubectl_bin get certificate some-name-ssl -o 'jsonpath={.status.revision}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lN4izIvaJE +++ mktemp ++ local LAST_ERR=/tmp/tmp.nXsGJqnJbN ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get certificate some-name-ssl -o 'jsonpath={.status.revision}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.lN4izIvaJE ++ cat /tmp/tmp.nXsGJqnJbN ++ rm /tmp/tmp.lN4izIvaJE /tmp/tmp.nXsGJqnJbN ++ return 0 + revision=1 + kubectl_bin exec cmctl-69659bcd68-6vrn6 -- /tmp/cmctl renew some-name-ssl ++ mktemp + local LAST_OUT=/tmp/tmp.EnM5iaPhJH ++ mktemp + local LAST_ERR=/tmp/tmp.2AYwdnYOqP + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec cmctl-69659bcd68-6vrn6 -- /tmp/cmctl renew some-name-ssl + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.EnM5iaPhJH Manually triggered issuance of Certificate tls-issue-cert-manager-30987/some-name-ssl + cat /tmp/tmp.2AYwdnYOqP + rm /tmp/tmp.EnM5iaPhJH /tmp/tmp.2AYwdnYOqP + return 0 + for i in '{1..10}' + local new_revision ++ kubectl_bin get certificate some-name-ssl -o 'jsonpath={.status.revision}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mEhNXSdDQ7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.27WH7jHmep ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get certificate some-name-ssl -o 'jsonpath={.status.revision}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.mEhNXSdDQ7 ++ cat /tmp/tmp.27WH7jHmep ++ rm /tmp/tmp.mEhNXSdDQ7 /tmp/tmp.27WH7jHmep ++ return 0 + new_revision=1 + '[' 2 == 1 ']' + sleep 1 + for i in '{1..10}' + local new_revision ++ kubectl_bin get certificate some-name-ssl -o 'jsonpath={.status.revision}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ep8qNOh2bc +++ mktemp ++ local LAST_ERR=/tmp/tmp.UNf9wZxFBD ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get certificate some-name-ssl -o 'jsonpath={.status.revision}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ep8qNOh2bc ++ cat /tmp/tmp.UNf9wZxFBD ++ rm /tmp/tmp.ep8qNOh2bc /tmp/tmp.UNf9wZxFBD ++ return 0 + new_revision=2 + '[' 2 == 2 ']' + break + sleep 10 + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UQx6fvPgZs +++ mktemp ++ local LAST_ERR=/tmp/tmp.5B7b2jMmyN ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 
'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.UQx6fvPgZs ++ cat /tmp/tmp.5B7b2jMmyN ++ rm /tmp/tmp.UQx6fvPgZs /tmp/tmp.5B7b2jMmyN ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BZASfHAF06 +++ mktemp ++ local LAST_ERR=/tmp/tmp.JMMrmvRmvv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.BZASfHAF06 ++ cat /tmp/tmp.JMMrmvRmvv ++ rm /tmp/tmp.BZASfHAF06 /tmp/tmp.JMMrmvRmvv ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness.......................................................................................................................................................................................... + wait_for_running some-name-cfg 3 false + local name=some-name-cfg + let last_pod=2 + local check_cluster_readyness=false + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=cfg + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-cfg-0 + local pod=some-name-cfg-0 + set +o xtrace waiting for pod/some-name-cfg-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-cfg-1 + local pod=some-name-cfg-1 + set +o xtrace waiting for pod/some-name-cfg-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5t3IJEQgly +++ mktemp ++ local LAST_ERR=/tmp/tmp.1K3gwabmSh ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.5t3IJEQgly ++ cat /tmp/tmp.1K3gwabmSh ++ rm /tmp/tmp.5t3IJEQgly /tmp/tmp.1K3gwabmSh ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-cfg-2 + local pod=some-name-cfg-2 + set +o xtrace waiting for pod/some-name-cfg-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8MEMEZRnQQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.SnFDoqFN8M ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8MEMEZRnQQ ++ cat /tmp/tmp.SnFDoqFN8M ++ rm /tmp/tmp.8MEMEZRnQQ /tmp/tmp.SnFDoqFN8M ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ false == \t\r\u\e ]] + wait_for_running some-name-mongos 3 + local name=some-name-mongos + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=mongos + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-mongos-0 + local pod=some-name-mongos-0 + set +o xtrace waiting for pod/some-name-mongos-0 to 
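The wait_for_running helper traced here walks pods some-name-rs0-0..2 (and later the cfg and mongos replsets) one by one, checks the arbiter/non_voting flags on the psmdb spec, and only then waits for overall cluster readiness. The internals of wait_pod are not visible in this log, so the kubectl wait call below is only an assumed stand-in for that per-pod readiness check:

    cluster=some-name
    rs_name=rs0
    last_pod=2
    for i in $(seq 0 "$last_pod"); do
        # hypothetical replacement for the wait_pod helper seen in the trace
        kubectl wait --for=condition=Ready "pod/${cluster}-${rs_name}-${i}" --timeout=300s
    done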
be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-mongos-1 + local pod=some-name-mongos-1 + set +o xtrace waiting for pod/some-name-mongos-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lCQpBgGzRf +++ mktemp ++ local LAST_ERR=/tmp/tmp.VkHkGIXbp4 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.lCQpBgGzRf ++ cat /tmp/tmp.VkHkGIXbp4 ++ rm /tmp/tmp.lCQpBgGzRf /tmp/tmp.VkHkGIXbp4 ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-mongos-2 + local pod=some-name-mongos-2 + set +o xtrace waiting for pod/some-name-mongos-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.egKdH7Z8CR +++ mktemp ++ local LAST_ERR=/tmp/tmp.G76nxIrzzo ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.egKdH7Z8CR ++ cat /tmp/tmp.G76nxIrzzo ++ rm /tmp/tmp.egKdH7Z8CR /tmp/tmp.G76nxIrzzo ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + renew_certificate some-name-ssl-internal + certificate=some-name-ssl-internal + wait_certificate some-name-ssl-internal + certificate=some-name-ssl-internal + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl-internal --timeout=60s certificate.cert-manager.io/some-name-ssl-internal condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl-internal --timeout=60s certificate.cert-manager.io/some-name-ssl-internal condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl-internal --timeout=60s certificate.cert-manager.io/some-name-ssl-internal condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl-internal --timeout=60s certificate.cert-manager.io/some-name-ssl-internal condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl-internal --timeout=60s certificate.cert-manager.io/some-name-ssl-internal condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl-internal --timeout=60s certificate.cert-manager.io/some-name-ssl-internal condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl-internal --timeout=60s certificate.cert-manager.io/some-name-ssl-internal condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl-internal --timeout=60s certificate.cert-manager.io/some-name-ssl-internal condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl-internal --timeout=60s certificate.cert-manager.io/some-name-ssl-internal condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl-internal --timeout=60s 
certificate.cert-manager.io/some-name-ssl-internal condition met + sleep 1 + desc 'renew some-name-ssl-internal' + set +o xtrace ----------------------------------------------------------------------------------- renew some-name-ssl-internal ----------------------------------------------------------------------------------- + local pod_name ++ kubectl_bin get pods --selector=name=cmctl -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IfYQSi2knp +++ mktemp ++ local LAST_ERR=/tmp/tmp.RBib8xZGqd ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=cmctl -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.IfYQSi2knp ++ cat /tmp/tmp.RBib8xZGqd ++ rm /tmp/tmp.IfYQSi2knp /tmp/tmp.RBib8xZGqd ++ return 0 + pod_name=cmctl-69659bcd68-6vrn6 + local revision ++ kubectl_bin get certificate some-name-ssl-internal -o 'jsonpath={.status.revision}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IemHCDTXtw +++ mktemp ++ local LAST_ERR=/tmp/tmp.6WWJ59z3ac ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get certificate some-name-ssl-internal -o 'jsonpath={.status.revision}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.IemHCDTXtw ++ cat /tmp/tmp.6WWJ59z3ac ++ rm /tmp/tmp.IemHCDTXtw /tmp/tmp.6WWJ59z3ac ++ return 0 + revision=1 + kubectl_bin exec cmctl-69659bcd68-6vrn6 -- /tmp/cmctl renew some-name-ssl-internal ++ mktemp + local LAST_OUT=/tmp/tmp.2Yi7hakqMw ++ mktemp + local LAST_ERR=/tmp/tmp.HJ7U9Ppii9 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec cmctl-69659bcd68-6vrn6 -- /tmp/cmctl renew some-name-ssl-internal + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.2Yi7hakqMw Manually triggered issuance of Certificate tls-issue-cert-manager-30987/some-name-ssl-internal + cat /tmp/tmp.HJ7U9Ppii9 + rm /tmp/tmp.2Yi7hakqMw /tmp/tmp.HJ7U9Ppii9 + return 0 + for i in '{1..10}' + local new_revision ++ kubectl_bin get certificate some-name-ssl-internal -o 'jsonpath={.status.revision}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hXGHYfaOzN +++ mktemp ++ local LAST_ERR=/tmp/tmp.v4YNldmQD9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get certificate some-name-ssl-internal -o 'jsonpath={.status.revision}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hXGHYfaOzN ++ cat /tmp/tmp.v4YNldmQD9 ++ rm /tmp/tmp.hXGHYfaOzN /tmp/tmp.v4YNldmQD9 ++ return 0 + new_revision=2 + '[' 2 == 2 ']' + break + sleep 10 + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.N99HLo3bWg +++ mktemp ++ local LAST_ERR=/tmp/tmp.KiUbAM2u2F ++ local exit_status=0 ++ local timeout=4 +++ seq 0 
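Both renewals traced above (some-name-ssl and some-name-ssl-internal) follow the same pattern: wait until the Certificate reports Ready, record its .status.revision, trigger reissuance with cmctl from the helper pod, then poll until the revision increments. A condensed sketch built from the commands visible in the trace; the pod name is the one the test discovered, and error handling is omitted:

    cert=some-name-ssl-internal
    pod_name=cmctl-69659bcd68-6vrn6

    kubectl wait --for=condition=Ready "certificate/${cert}" --timeout=60s
    revision=$(kubectl get certificate "$cert" -o jsonpath='{.status.revision}')

    # prints "Manually triggered issuance of Certificate ..." as seen above
    kubectl exec "$pod_name" -- /tmp/cmctl renew "$cert"

    for i in {1..10}; do
        new_revision=$(kubectl get certificate "$cert" -o jsonpath='{.status.revision}')
        if [ "$((revision + 1))" == "$new_revision" ]; then
            break
        fi
        sleep 1
    done

In the trace this is why the first check evaluates '[ 2 == 1 ]' (revision still 1) and the second '[ 2 == 2 ]' before the loop breaks.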
2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.N99HLo3bWg ++ cat /tmp/tmp.KiUbAM2u2F ++ rm /tmp/tmp.N99HLo3bWg /tmp/tmp.KiUbAM2u2F ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gpGWEVOj6N +++ mktemp ++ local LAST_ERR=/tmp/tmp.syeb6m0q4y ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.gpGWEVOj6N ++ cat /tmp/tmp.syeb6m0q4y ++ rm /tmp/tmp.gpGWEVOj6N /tmp/tmp.syeb6m0q4y ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness................................................................................................................................................................................................................ + wait_for_running some-name-cfg 3 false + local name=some-name-cfg + let last_pod=2 + local check_cluster_readyness=false + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=cfg + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-cfg-0 + local pod=some-name-cfg-0 + set +o xtrace waiting for pod/some-name-cfg-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-cfg-1 + local pod=some-name-cfg-1 + set +o xtrace waiting for pod/some-name-cfg-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6YZggJmy1Z +++ mktemp ++ local LAST_ERR=/tmp/tmp.NGJxsJ7KZA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.6YZggJmy1Z ++ cat /tmp/tmp.NGJxsJ7KZA ++ rm /tmp/tmp.6YZggJmy1Z /tmp/tmp.NGJxsJ7KZA ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-cfg-2 + local pod=some-name-cfg-2 + set +o xtrace waiting for pod/some-name-cfg-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pEcjrzcRfr +++ mktemp ++ local LAST_ERR=/tmp/tmp.jCCF0wg4Nl ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.pEcjrzcRfr ++ cat /tmp/tmp.jCCF0wg4Nl ++ rm /tmp/tmp.pEcjrzcRfr /tmp/tmp.jCCF0wg4Nl ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ false == \t\r\u\e ]] + wait_for_running some-name-mongos 3 + local name=some-name-mongos + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=mongos + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod 
some-name-mongos-0 + local pod=some-name-mongos-0 + set +o xtrace waiting for pod/some-name-mongos-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-mongos-1 + local pod=some-name-mongos-1 + set +o xtrace waiting for pod/some-name-mongos-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wLtt1nejSE +++ mktemp ++ local LAST_ERR=/tmp/tmp.pG2krqIopX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.wLtt1nejSE ++ cat /tmp/tmp.pG2krqIopX ++ rm /tmp/tmp.wLtt1nejSE /tmp/tmp.pG2krqIopX ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-mongos-2 + local pod=some-name-mongos-2 + set +o xtrace waiting for pod/some-name-mongos-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MhUAdFE5zQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.Y2YCrDfKrB ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.MhUAdFE5zQ ++ cat /tmp/tmp.Y2YCrDfKrB ++ rm /tmp/tmp.MhUAdFE5zQ /tmp/tmp.Y2YCrDfKrB ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + desc 'check if CA issuer created' + set +o xtrace ----------------------------------------------------------------------------------- check if CA issuer created ----------------------------------------------------------------------------------- + compare_kubectl issuer/some-name-psmdb-ca-issuer + local resource=issuer/some-name-psmdb-ca-issuer + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-ca-issuer.yml + local new_result=/tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-ca-issuer.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-ca-issuer-oc.yml ']' + kubectl_bin get -o yaml issuer/some-name-psmdb-ca-issuer ++ mktemp + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. 
| select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-30987", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - + local LAST_OUT=/tmp/tmp.Um2shiMEuX ++ mktemp + local LAST_ERR=/tmp/tmp.LSFwqKMnM4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml issuer/some-name-psmdb-ca-issuer + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Um2shiMEuX + cat /tmp/tmp.LSFwqKMnM4 + rm /tmp/tmp.Um2shiMEuX /tmp/tmp.LSFwqKMnM4 + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-ca-issuer.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-ca-issuer.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-ca-issuer.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-ca-issuer.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-ca-issuer.yml /tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-ca-issuer.yml + desc 'check if issuer created' + set +o xtrace ----------------------------------------------------------------------------------- check if issuer created ----------------------------------------------------------------------------------- + compare_kubectl issuer/some-name-psmdb-issuer + local resource=issuer/some-name-psmdb-issuer + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-issuer.yml + local new_result=/tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-issuer.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-issuer-oc.yml ']' + kubectl_bin get -o yaml issuer/some-name-psmdb-issuer + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. 
| select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-30987", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.RdKkivahtI ++ mktemp + local LAST_ERR=/tmp/tmp.LK55YbTdU6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml issuer/some-name-psmdb-issuer + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.RdKkivahtI + cat /tmp/tmp.LK55YbTdU6 + rm /tmp/tmp.RdKkivahtI /tmp/tmp.LK55YbTdU6 + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-issuer.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-issuer.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-issuer.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-issuer.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-issuer.yml /tmp/tmp.jTdR9s6bFl/issuer_some-name-psmdb-issuer.yml + desc 'check if certificate issued' + set +o xtrace ----------------------------------------------------------------------------------- check if certificate issued ----------------------------------------------------------------------------------- + compare_kubectl certificate/some-name-ssl + local resource=certificate/some-name-ssl + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl.yml + local new_result=/tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-oc.yml ']' + kubectl_bin get -o yaml certificate/some-name-ssl + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. 
| select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-30987", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.yN2muwaCnd ++ mktemp + local LAST_ERR=/tmp/tmp.arvEY2aPOz + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml certificate/some-name-ssl + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.yN2muwaCnd + cat /tmp/tmp.arvEY2aPOz + rm /tmp/tmp.yN2muwaCnd /tmp/tmp.arvEY2aPOz + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl.yml /tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl.yml + desc 'check if internal certificate issued' + set +o xtrace ----------------------------------------------------------------------------------- check if internal certificate issued ----------------------------------------------------------------------------------- + compare_kubectl certificate/some-name-ssl-internal + local resource=certificate/some-name-ssl-internal + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-internal.yml + local new_result=/tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl-internal.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-internal-oc.yml ']' + kubectl_bin get -o yaml certificate/some-name-ssl-internal + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. 
| select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-30987", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.l8ckeK3JKD ++ mktemp + local LAST_ERR=/tmp/tmp.wUCCovfxtQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml certificate/some-name-ssl-internal + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.l8ckeK3JKD + cat /tmp/tmp.wUCCovfxtQ + rm /tmp/tmp.l8ckeK3JKD /tmp/tmp.wUCCovfxtQ + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl-internal.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl-internal.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl-internal.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-internal.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-internal.yml /tmp/tmp.jTdR9s6bFl/certificate_some-name-ssl-internal.yml + desc 'disable TLS' + set +o xtrace ----------------------------------------------------------------------------------- disable TLS ----------------------------------------------------------------------------------- + pause_cluster some-name + local cluster_name=some-name + echo 'Pausing cluster some-name' Pausing cluster some-name + kubectl_bin patch psmdb some-name --type merge '-p={"spec": { "pause": true } }' ++ mktemp + local LAST_OUT=/tmp/tmp.H7msbObnGY ++ mktemp + local LAST_ERR=/tmp/tmp.e2zN6Hnj2a + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch psmdb some-name --type merge '-p={"spec": { "pause": true } }' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.H7msbObnGY perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.e2zN6Hnj2a + rm /tmp/tmp.H7msbObnGY /tmp/tmp.e2zN6Hnj2a + return 0 + wait_for_cluster_state some-name paused + local cluster_name=some-name + local target_state=paused + echo -n 'Waiting for cluster to reach paused state' Waiting for cluster to reach paused state+ 
local timeout=0 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hg4J2QKs79 +++ mktemp ++ local LAST_ERR=/tmp/tmp.t4MTwkpQrX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hg4J2QKs79 ++ cat /tmp/tmp.t4MTwkpQrX ++ rm /tmp/tmp.hg4J2QKs79 /tmp/tmp.t4MTwkpQrX ++ return 0 + [[ ready == paused ]] + sleep 1 + timeout=1 + echo -n . .+ [[ 1 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.V3ohnRBr0V +++ mktemp ++ local LAST_ERR=/tmp/tmp.KF0TP4WBsM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.V3ohnRBr0V ++ cat /tmp/tmp.KF0TP4WBsM ++ rm /tmp/tmp.V3ohnRBr0V /tmp/tmp.KF0TP4WBsM ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=2 + echo -n . .+ [[ 2 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9oudj1qwyh +++ mktemp ++ local LAST_ERR=/tmp/tmp.7xuUOpTxeo ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.9oudj1qwyh ++ cat /tmp/tmp.7xuUOpTxeo ++ rm /tmp/tmp.9oudj1qwyh /tmp/tmp.7xuUOpTxeo ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=3 + echo -n . .+ [[ 3 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ikY8YaYOfY +++ mktemp ++ local LAST_ERR=/tmp/tmp.F651hZcOxu ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ikY8YaYOfY ++ cat /tmp/tmp.F651hZcOxu ++ rm /tmp/tmp.ikY8YaYOfY /tmp/tmp.F651hZcOxu ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=4 + echo -n . .+ [[ 4 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zotmuL8Tnk +++ mktemp ++ local LAST_ERR=/tmp/tmp.s5OsmT62rl ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.zotmuL8Tnk ++ cat /tmp/tmp.s5OsmT62rl ++ rm /tmp/tmp.zotmuL8Tnk /tmp/tmp.s5OsmT62rl ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=5 + echo -n . .+ [[ 5 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NjU1rsJE1U +++ mktemp ++ local LAST_ERR=/tmp/tmp.lnegXw4OUw ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.NjU1rsJE1U ++ cat /tmp/tmp.lnegXw4OUw ++ rm /tmp/tmp.NjU1rsJE1U /tmp/tmp.lnegXw4OUw ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=6 + echo -n . 
.+ [[ 6 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Boi1zXQvfi +++ mktemp ++ local LAST_ERR=/tmp/tmp.0CfLNTT0Vf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Boi1zXQvfi ++ cat /tmp/tmp.0CfLNTT0Vf ++ rm /tmp/tmp.Boi1zXQvfi /tmp/tmp.0CfLNTT0Vf ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=7 + echo -n . .+ [[ 7 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7eZXHDnZtQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.noQC48n1vc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.7eZXHDnZtQ ++ cat /tmp/tmp.noQC48n1vc ++ rm /tmp/tmp.7eZXHDnZtQ /tmp/tmp.noQC48n1vc ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=8 + echo -n . .+ [[ 8 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JgFYKRIQc7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.fxlwT77kZ6 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JgFYKRIQc7 ++ cat /tmp/tmp.fxlwT77kZ6 ++ rm /tmp/tmp.JgFYKRIQc7 /tmp/tmp.fxlwT77kZ6 ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=9 + echo -n . .+ [[ 9 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aNZPv8Kyx7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Q58S0Ay4ng ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.aNZPv8Kyx7 ++ cat /tmp/tmp.Q58S0Ay4ng ++ rm /tmp/tmp.aNZPv8Kyx7 /tmp/tmp.Q58S0Ay4ng ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=10 + echo -n . .+ [[ 10 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rP2w2YEbRv +++ mktemp ++ local LAST_ERR=/tmp/tmp.RXkkIoZsdR ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.rP2w2YEbRv ++ cat /tmp/tmp.RXkkIoZsdR ++ rm /tmp/tmp.rP2w2YEbRv /tmp/tmp.RXkkIoZsdR ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=11 + echo -n . .+ [[ 11 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.etUHwV5ZAX +++ mktemp ++ local LAST_ERR=/tmp/tmp.SLJC38r94r ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.etUHwV5ZAX ++ cat /tmp/tmp.SLJC38r94r ++ rm /tmp/tmp.etUHwV5ZAX /tmp/tmp.SLJC38r94r ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=12 + echo -n . 
.+ [[ 12 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sF5Ff0kosk +++ mktemp ++ local LAST_ERR=/tmp/tmp.dwUBOvLCEE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.sF5Ff0kosk ++ cat /tmp/tmp.dwUBOvLCEE ++ rm /tmp/tmp.sF5Ff0kosk /tmp/tmp.dwUBOvLCEE ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=13 + echo -n . .+ [[ 13 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KMBBuTy8OI +++ mktemp ++ local LAST_ERR=/tmp/tmp.DcFtxpUgMX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.KMBBuTy8OI ++ cat /tmp/tmp.DcFtxpUgMX ++ rm /tmp/tmp.KMBBuTy8OI /tmp/tmp.DcFtxpUgMX ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=14 + echo -n . .+ [[ 14 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wcyvYU3nar +++ mktemp ++ local LAST_ERR=/tmp/tmp.w3POqkPy6p ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.wcyvYU3nar ++ cat /tmp/tmp.w3POqkPy6p ++ rm /tmp/tmp.wcyvYU3nar /tmp/tmp.w3POqkPy6p ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=15 + echo -n . .+ [[ 15 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.15l0lWf5fh +++ mktemp ++ local LAST_ERR=/tmp/tmp.Vhe5HSEQ4V ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.15l0lWf5fh ++ cat /tmp/tmp.Vhe5HSEQ4V ++ rm /tmp/tmp.15l0lWf5fh /tmp/tmp.Vhe5HSEQ4V ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=16 + echo -n . .+ [[ 16 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ksnvgRivIP +++ mktemp ++ local LAST_ERR=/tmp/tmp.s2Q3hSMn7n ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ksnvgRivIP ++ cat /tmp/tmp.s2Q3hSMn7n ++ rm /tmp/tmp.ksnvgRivIP /tmp/tmp.s2Q3hSMn7n ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=17 + echo -n . .+ [[ 17 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LEpbCQINU6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.T8n4yR7Nqe ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LEpbCQINU6 ++ cat /tmp/tmp.T8n4yR7Nqe ++ rm /tmp/tmp.LEpbCQINU6 /tmp/tmp.T8n4yR7Nqe ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=18 + echo -n . 
.+ [[ 18 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VahpzRZuKQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.c2OO7nS2in ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.VahpzRZuKQ ++ cat /tmp/tmp.c2OO7nS2in ++ rm /tmp/tmp.VahpzRZuKQ /tmp/tmp.c2OO7nS2in ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=19 + echo -n . .+ [[ 19 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pnQre9gcph +++ mktemp ++ local LAST_ERR=/tmp/tmp.xVfne9yjEJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.pnQre9gcph ++ cat /tmp/tmp.xVfne9yjEJ ++ rm /tmp/tmp.pnQre9gcph /tmp/tmp.xVfne9yjEJ ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=20 + echo -n . .+ [[ 20 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.j6sgaiWia6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.17eQ9IRULB ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.j6sgaiWia6 ++ cat /tmp/tmp.17eQ9IRULB ++ rm /tmp/tmp.j6sgaiWia6 /tmp/tmp.17eQ9IRULB ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=21 + echo -n . .+ [[ 21 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aKWU3PDb23 +++ mktemp ++ local LAST_ERR=/tmp/tmp.LuoKFom4xl ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.aKWU3PDb23 ++ cat /tmp/tmp.LuoKFom4xl ++ rm /tmp/tmp.aKWU3PDb23 /tmp/tmp.LuoKFom4xl ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=22 + echo -n . .+ [[ 22 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uNtSBHpm0f +++ mktemp ++ local LAST_ERR=/tmp/tmp.WnsQUmQ6gj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.uNtSBHpm0f ++ cat /tmp/tmp.WnsQUmQ6gj ++ rm /tmp/tmp.uNtSBHpm0f /tmp/tmp.WnsQUmQ6gj ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=23 + echo -n . .+ [[ 23 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jeo2ZPmiwK +++ mktemp ++ local LAST_ERR=/tmp/tmp.8m6vm1juM9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.jeo2ZPmiwK ++ cat /tmp/tmp.8m6vm1juM9 ++ rm /tmp/tmp.jeo2ZPmiwK /tmp/tmp.8m6vm1juM9 ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=24 + echo -n . 
.+ [[ 24 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2CCCXJK5rq +++ mktemp ++ local LAST_ERR=/tmp/tmp.gTNq8cqxaD ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.2CCCXJK5rq ++ cat /tmp/tmp.gTNq8cqxaD ++ rm /tmp/tmp.2CCCXJK5rq /tmp/tmp.gTNq8cqxaD ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=25 + echo -n . .+ [[ 25 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.E4IKj2FCXm +++ mktemp ++ local LAST_ERR=/tmp/tmp.Pt75glPxUf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.E4IKj2FCXm ++ cat /tmp/tmp.Pt75glPxUf ++ rm /tmp/tmp.E4IKj2FCXm /tmp/tmp.Pt75glPxUf ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=26 + echo -n . .+ [[ 26 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UgzRqKYlOX +++ mktemp ++ local LAST_ERR=/tmp/tmp.ry8cTCt3do ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.UgzRqKYlOX ++ cat /tmp/tmp.ry8cTCt3do ++ rm /tmp/tmp.UgzRqKYlOX /tmp/tmp.ry8cTCt3do ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=27 + echo -n . .+ [[ 27 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3G6i1TjeF3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.UWvWkreHlA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3G6i1TjeF3 ++ cat /tmp/tmp.UWvWkreHlA ++ rm /tmp/tmp.3G6i1TjeF3 /tmp/tmp.UWvWkreHlA ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=28 + echo -n . .+ [[ 28 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J3CULfhrPV +++ mktemp ++ local LAST_ERR=/tmp/tmp.geq8AGgWxe ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.J3CULfhrPV ++ cat /tmp/tmp.geq8AGgWxe ++ rm /tmp/tmp.J3CULfhrPV /tmp/tmp.geq8AGgWxe ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=29 + echo -n . .+ [[ 29 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zRNJMmsjnV +++ mktemp ++ local LAST_ERR=/tmp/tmp.l7E2HG6eII ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.zRNJMmsjnV ++ cat /tmp/tmp.l7E2HG6eII ++ rm /tmp/tmp.zRNJMmsjnV /tmp/tmp.l7E2HG6eII ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=30 + echo -n . 
.+ [[ 30 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PfANyZ1vU0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.L2Ccmqo6hF ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.PfANyZ1vU0 ++ cat /tmp/tmp.L2Ccmqo6hF ++ rm /tmp/tmp.PfANyZ1vU0 /tmp/tmp.L2Ccmqo6hF ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=31 + echo -n . .+ [[ 31 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JyplfPALdc +++ mktemp ++ local LAST_ERR=/tmp/tmp.7D4Ene4D7l ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JyplfPALdc ++ cat /tmp/tmp.7D4Ene4D7l ++ rm /tmp/tmp.JyplfPALdc /tmp/tmp.7D4Ene4D7l ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=32 + echo -n . .+ [[ 32 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gPqPKaUieU +++ mktemp ++ local LAST_ERR=/tmp/tmp.XebQ1OJYNH ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.gPqPKaUieU ++ cat /tmp/tmp.XebQ1OJYNH ++ rm /tmp/tmp.gPqPKaUieU /tmp/tmp.XebQ1OJYNH ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=33 + echo -n . .+ [[ 33 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BEWUsJJkvs +++ mktemp ++ local LAST_ERR=/tmp/tmp.7SH2IFyFq7 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.BEWUsJJkvs ++ cat /tmp/tmp.7SH2IFyFq7 ++ rm /tmp/tmp.BEWUsJJkvs /tmp/tmp.7SH2IFyFq7 ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=34 + echo -n . .+ [[ 34 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HptQoevs46 +++ mktemp ++ local LAST_ERR=/tmp/tmp.M3l24ES9Na ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.HptQoevs46 ++ cat /tmp/tmp.M3l24ES9Na ++ rm /tmp/tmp.HptQoevs46 /tmp/tmp.M3l24ES9Na ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=35 + echo -n . .+ [[ 35 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0nvvgo70Z4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.2FlAZOTp6n ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.0nvvgo70Z4 ++ cat /tmp/tmp.2FlAZOTp6n ++ rm /tmp/tmp.0nvvgo70Z4 /tmp/tmp.2FlAZOTp6n ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=36 + echo -n . 
.+ [[ 36 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Za13bjppoZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.hsL1ge9noS ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Za13bjppoZ ++ cat /tmp/tmp.hsL1ge9noS ++ rm /tmp/tmp.Za13bjppoZ /tmp/tmp.hsL1ge9noS ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=37 + echo -n . .+ [[ 37 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KxyBq3sVJN +++ mktemp ++ local LAST_ERR=/tmp/tmp.1lj7CB17qk ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.KxyBq3sVJN ++ cat /tmp/tmp.1lj7CB17qk ++ rm /tmp/tmp.KxyBq3sVJN /tmp/tmp.1lj7CB17qk ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=38 + echo -n . .+ [[ 38 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EG4qMn94wy +++ mktemp ++ local LAST_ERR=/tmp/tmp.2CKbgwE8y0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.EG4qMn94wy ++ cat /tmp/tmp.2CKbgwE8y0 ++ rm /tmp/tmp.EG4qMn94wy /tmp/tmp.2CKbgwE8y0 ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=39 + echo -n . .+ [[ 39 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6Gkca7VRLC +++ mktemp ++ local LAST_ERR=/tmp/tmp.A77Ok2C3DN ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.6Gkca7VRLC ++ cat /tmp/tmp.A77Ok2C3DN ++ rm /tmp/tmp.6Gkca7VRLC /tmp/tmp.A77Ok2C3DN ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=40 + echo -n . .+ [[ 40 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0m4y8fsW81 +++ mktemp ++ local LAST_ERR=/tmp/tmp.o4SNnYVkBD ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.0m4y8fsW81 ++ cat /tmp/tmp.o4SNnYVkBD ++ rm /tmp/tmp.0m4y8fsW81 /tmp/tmp.o4SNnYVkBD ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=41 + echo -n . .+ [[ 41 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7lYOS0gP1B +++ mktemp ++ local LAST_ERR=/tmp/tmp.wcPjwTMjmU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.7lYOS0gP1B ++ cat /tmp/tmp.wcPjwTMjmU ++ rm /tmp/tmp.7lYOS0gP1B /tmp/tmp.wcPjwTMjmU ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=42 + echo -n . 
.+ [[ 42 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4keiiqa4E5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.v5FBGHUAmx ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.4keiiqa4E5 ++ cat /tmp/tmp.v5FBGHUAmx ++ rm /tmp/tmp.4keiiqa4E5 /tmp/tmp.v5FBGHUAmx ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=43 + echo -n . .+ [[ 43 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lOG56TQzBL +++ mktemp ++ local LAST_ERR=/tmp/tmp.0nVv8eaVM5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.lOG56TQzBL ++ cat /tmp/tmp.0nVv8eaVM5 ++ rm /tmp/tmp.lOG56TQzBL /tmp/tmp.0nVv8eaVM5 ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=44 + echo -n . .+ [[ 44 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uHrdrGEKB7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.VNaLcgYfi9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.uHrdrGEKB7 ++ cat /tmp/tmp.VNaLcgYfi9 ++ rm /tmp/tmp.uHrdrGEKB7 /tmp/tmp.VNaLcgYfi9 ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=45 + echo -n . .+ [[ 45 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1BsqNebyQL +++ mktemp ++ local LAST_ERR=/tmp/tmp.g0waibmuYp ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.1BsqNebyQL ++ cat /tmp/tmp.g0waibmuYp ++ rm /tmp/tmp.1BsqNebyQL /tmp/tmp.g0waibmuYp ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=46 + echo -n . .+ [[ 46 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CFjLsT4Pyg +++ mktemp ++ local LAST_ERR=/tmp/tmp.eP0nWYaEpY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.CFjLsT4Pyg ++ cat /tmp/tmp.eP0nWYaEpY ++ rm /tmp/tmp.CFjLsT4Pyg /tmp/tmp.eP0nWYaEpY ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=47 + echo -n . .+ [[ 47 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.34AxI2Wibb +++ mktemp ++ local LAST_ERR=/tmp/tmp.42wjpcO4xW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.34AxI2Wibb ++ cat /tmp/tmp.42wjpcO4xW ++ rm /tmp/tmp.34AxI2Wibb /tmp/tmp.42wjpcO4xW ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=48 + echo -n . 
.+ [[ 48 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8g3Q9CkuKN +++ mktemp ++ local LAST_ERR=/tmp/tmp.TG0DprMxPq ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8g3Q9CkuKN ++ cat /tmp/tmp.TG0DprMxPq ++ rm /tmp/tmp.8g3Q9CkuKN /tmp/tmp.TG0DprMxPq ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=49 + echo -n . .+ [[ 49 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RzBNqKCCuB +++ mktemp ++ local LAST_ERR=/tmp/tmp.7kNnOyzHyA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.RzBNqKCCuB ++ cat /tmp/tmp.7kNnOyzHyA ++ rm /tmp/tmp.RzBNqKCCuB /tmp/tmp.7kNnOyzHyA ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=50 + echo -n . .+ [[ 50 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xvZRcZg4NJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.kkMPzrKZb1 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xvZRcZg4NJ ++ cat /tmp/tmp.kkMPzrKZb1 ++ rm /tmp/tmp.xvZRcZg4NJ /tmp/tmp.kkMPzrKZb1 ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=51 + echo -n . .+ [[ 51 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.237JgmjGWm +++ mktemp ++ local LAST_ERR=/tmp/tmp.1pwOArx7a8 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.237JgmjGWm ++ cat /tmp/tmp.1pwOArx7a8 ++ rm /tmp/tmp.237JgmjGWm /tmp/tmp.1pwOArx7a8 ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=52 + echo -n . .+ [[ 52 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.n2vR3L3c1m +++ mktemp ++ local LAST_ERR=/tmp/tmp.GTThUmOxi8 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.n2vR3L3c1m ++ cat /tmp/tmp.GTThUmOxi8 ++ rm /tmp/tmp.n2vR3L3c1m /tmp/tmp.GTThUmOxi8 ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=53 + echo -n . .+ [[ 53 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ksdOQ0KTCM +++ mktemp ++ local LAST_ERR=/tmp/tmp.neaoyNtmxn ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ksdOQ0KTCM ++ cat /tmp/tmp.neaoyNtmxn ++ rm /tmp/tmp.ksdOQ0KTCM /tmp/tmp.neaoyNtmxn ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=54 + echo -n . 
.+ [[ 54 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JRzc79auXs +++ mktemp ++ local LAST_ERR=/tmp/tmp.ulgVdxP4hw ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JRzc79auXs ++ cat /tmp/tmp.ulgVdxP4hw ++ rm /tmp/tmp.JRzc79auXs /tmp/tmp.ulgVdxP4hw ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=55 + echo -n . .+ [[ 55 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fCymVEFKOH +++ mktemp ++ local LAST_ERR=/tmp/tmp.VGBys1fmD5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.fCymVEFKOH ++ cat /tmp/tmp.VGBys1fmD5 ++ rm /tmp/tmp.fCymVEFKOH /tmp/tmp.VGBys1fmD5 ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=56 + echo -n . .+ [[ 56 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SPRpFXZalL +++ mktemp ++ local LAST_ERR=/tmp/tmp.Cr7Nkq9PI6 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.SPRpFXZalL ++ cat /tmp/tmp.Cr7Nkq9PI6 ++ rm /tmp/tmp.SPRpFXZalL /tmp/tmp.Cr7Nkq9PI6 ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=57 + echo -n . .+ [[ 57 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LddpcCvt9j +++ mktemp ++ local LAST_ERR=/tmp/tmp.vtG4zSo2Fi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LddpcCvt9j ++ cat /tmp/tmp.vtG4zSo2Fi ++ rm /tmp/tmp.LddpcCvt9j /tmp/tmp.vtG4zSo2Fi ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=58 + echo -n . .+ [[ 58 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Rz1VmaXxaz +++ mktemp ++ local LAST_ERR=/tmp/tmp.nLddFbdZgZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Rz1VmaXxaz ++ cat /tmp/tmp.nLddFbdZgZ ++ rm /tmp/tmp.Rz1VmaXxaz /tmp/tmp.nLddFbdZgZ ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=59 + echo -n . .+ [[ 59 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oVkIEOrmdn +++ mktemp ++ local LAST_ERR=/tmp/tmp.Dn30bDBSrJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.oVkIEOrmdn ++ cat /tmp/tmp.Dn30bDBSrJ ++ rm /tmp/tmp.oVkIEOrmdn /tmp/tmp.Dn30bDBSrJ ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=60 + echo -n . 
.+ [[ 60 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.E8GtUStGIP +++ mktemp ++ local LAST_ERR=/tmp/tmp.PBbgmvmJcA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.E8GtUStGIP ++ cat /tmp/tmp.PBbgmvmJcA ++ rm /tmp/tmp.E8GtUStGIP /tmp/tmp.PBbgmvmJcA ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=61 + echo -n . .+ [[ 61 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9sTQSdY9df +++ mktemp ++ local LAST_ERR=/tmp/tmp.y4xmyfWhUZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.9sTQSdY9df ++ cat /tmp/tmp.y4xmyfWhUZ ++ rm /tmp/tmp.9sTQSdY9df /tmp/tmp.y4xmyfWhUZ ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=62 + echo -n . .+ [[ 62 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.O7IoFtfFiT +++ mktemp ++ local LAST_ERR=/tmp/tmp.Yo7XWiNHeX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.O7IoFtfFiT ++ cat /tmp/tmp.Yo7XWiNHeX ++ rm /tmp/tmp.O7IoFtfFiT /tmp/tmp.Yo7XWiNHeX ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=63 + echo -n . .+ [[ 63 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QJy8E4cJCi +++ mktemp ++ local LAST_ERR=/tmp/tmp.55e4n9be0E ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.QJy8E4cJCi ++ cat /tmp/tmp.55e4n9be0E ++ rm /tmp/tmp.QJy8E4cJCi /tmp/tmp.55e4n9be0E ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=64 + echo -n . .+ [[ 64 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WFfLzQ1Udl +++ mktemp ++ local LAST_ERR=/tmp/tmp.nFFPD1wpEz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.WFfLzQ1Udl ++ cat /tmp/tmp.nFFPD1wpEz ++ rm /tmp/tmp.WFfLzQ1Udl /tmp/tmp.nFFPD1wpEz ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=65 + echo -n . .+ [[ 65 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tPliE9UVVw +++ mktemp ++ local LAST_ERR=/tmp/tmp.zmVZJ1Oflt ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.tPliE9UVVw ++ cat /tmp/tmp.zmVZJ1Oflt ++ rm /tmp/tmp.tPliE9UVVw /tmp/tmp.zmVZJ1Oflt ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=66 + echo -n . 
.+ [[ 66 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.C0wKCqJRxt +++ mktemp ++ local LAST_ERR=/tmp/tmp.PJNAEdqz8M ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.C0wKCqJRxt ++ cat /tmp/tmp.PJNAEdqz8M ++ rm /tmp/tmp.C0wKCqJRxt /tmp/tmp.PJNAEdqz8M ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=67 + echo -n . .+ [[ 67 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Qg8Ufafv3i +++ mktemp ++ local LAST_ERR=/tmp/tmp.6zW1PoGZvW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Qg8Ufafv3i ++ cat /tmp/tmp.6zW1PoGZvW ++ rm /tmp/tmp.Qg8Ufafv3i /tmp/tmp.6zW1PoGZvW ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=68 + echo -n . .+ [[ 68 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jQfHwRWPEv +++ mktemp ++ local LAST_ERR=/tmp/tmp.NKeAnB43yT ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.jQfHwRWPEv ++ cat /tmp/tmp.NKeAnB43yT ++ rm /tmp/tmp.jQfHwRWPEv /tmp/tmp.NKeAnB43yT ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=69 + echo -n . .+ [[ 69 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RCas97mF0o +++ mktemp ++ local LAST_ERR=/tmp/tmp.bQ43IxJeeV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.RCas97mF0o ++ cat /tmp/tmp.bQ43IxJeeV ++ rm /tmp/tmp.RCas97mF0o /tmp/tmp.bQ43IxJeeV ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=70 + echo -n . .+ [[ 70 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0a8rvFiyos +++ mktemp ++ local LAST_ERR=/tmp/tmp.nsYhWDFzcH ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.0a8rvFiyos ++ cat /tmp/tmp.nsYhWDFzcH ++ rm /tmp/tmp.0a8rvFiyos /tmp/tmp.nsYhWDFzcH ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=71 + echo -n . .+ [[ 71 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w3rtaqVyqS +++ mktemp ++ local LAST_ERR=/tmp/tmp.U2AjUa1Etl ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.w3rtaqVyqS ++ cat /tmp/tmp.U2AjUa1Etl ++ rm /tmp/tmp.w3rtaqVyqS /tmp/tmp.U2AjUa1Etl ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=72 + echo -n . 
.+ [[ 72 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fIA5GYOAkb +++ mktemp ++ local LAST_ERR=/tmp/tmp.l0F9aXtuhw ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.fIA5GYOAkb ++ cat /tmp/tmp.l0F9aXtuhw ++ rm /tmp/tmp.fIA5GYOAkb /tmp/tmp.l0F9aXtuhw ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=73 + echo -n . .+ [[ 73 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3hzy20yLk0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.xhFpoavNam ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3hzy20yLk0 ++ cat /tmp/tmp.xhFpoavNam ++ rm /tmp/tmp.3hzy20yLk0 /tmp/tmp.xhFpoavNam ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=74 + echo -n . .+ [[ 74 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.G2OMrl3TPd +++ mktemp ++ local LAST_ERR=/tmp/tmp.TpeZTJjRvZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.G2OMrl3TPd ++ cat /tmp/tmp.TpeZTJjRvZ ++ rm /tmp/tmp.G2OMrl3TPd /tmp/tmp.TpeZTJjRvZ ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=75 + echo -n . .+ [[ 75 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ehLb1YgvSZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.ElpgbEM6aS ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ehLb1YgvSZ ++ cat /tmp/tmp.ElpgbEM6aS ++ rm /tmp/tmp.ehLb1YgvSZ /tmp/tmp.ElpgbEM6aS ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=76 + echo -n . .+ [[ 76 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YXY5gYlfza +++ mktemp ++ local LAST_ERR=/tmp/tmp.IEdxXoBQJ6 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.YXY5gYlfza ++ cat /tmp/tmp.IEdxXoBQJ6 ++ rm /tmp/tmp.YXY5gYlfza /tmp/tmp.IEdxXoBQJ6 ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=77 + echo -n . .+ [[ 77 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4BTR57bCVP +++ mktemp ++ local LAST_ERR=/tmp/tmp.LcVLLX5KVz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.4BTR57bCVP ++ cat /tmp/tmp.LcVLLX5KVz ++ rm /tmp/tmp.4BTR57bCVP /tmp/tmp.LcVLLX5KVz ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=78 + echo -n . 
.+ [[ 78 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JhXdrzxAt0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZN2Jf6ECce ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JhXdrzxAt0 ++ cat /tmp/tmp.ZN2Jf6ECce ++ rm /tmp/tmp.JhXdrzxAt0 /tmp/tmp.ZN2Jf6ECce ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=79 + echo -n . .+ [[ 79 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vHFUWCJLm7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.H56bwaZoXc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.vHFUWCJLm7 ++ cat /tmp/tmp.H56bwaZoXc ++ rm /tmp/tmp.vHFUWCJLm7 /tmp/tmp.H56bwaZoXc ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=80 + echo -n . .+ [[ 80 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9UmVTzjyOE +++ mktemp ++ local LAST_ERR=/tmp/tmp.SKXJbfO3GZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.9UmVTzjyOE ++ cat /tmp/tmp.SKXJbfO3GZ ++ rm /tmp/tmp.9UmVTzjyOE /tmp/tmp.SKXJbfO3GZ ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=81 + echo -n . .+ [[ 81 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1eLASRBKk5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.sFSsZ1V3kw ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.1eLASRBKk5 ++ cat /tmp/tmp.sFSsZ1V3kw ++ rm /tmp/tmp.1eLASRBKk5 /tmp/tmp.sFSsZ1V3kw ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=82 + echo -n . 
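[editor's note] Every iteration in the trace above follows the same pattern: read .status.state from the psmdb resource, compare it to the target state, sleep one second, print a progress dot, and give up after 1500 attempts. A minimal sketch of that polling loop, reconstructed from the trace (the one-second sleep, the 1500 cap, and the helper name wait_for_cluster_state that appears a few lines below are taken from the log; argument handling and the failure message are assumptions):

    wait_for_cluster_state() {
        # Reconstruction of the wait loop visible in this log; not the harness source itself.
        local cluster_name=$1
        local target_state=$2
        local timeout=0

        echo -n "Waiting for cluster to reach ${target_state} state"
        until [[ "$(kubectl get psmdb "${cluster_name}" -o 'jsonpath={.status.state}')" == "${target_state}" ]]; do
            sleep 1
            timeout=$((timeout + 1))
            echo -n .
            # Cap taken from the "[[ N -gt 1500 ]]" checks in the trace; the abort action is assumed.
            if [[ ${timeout} -gt 1500 ]]; then
                echo "cluster ${cluster_name} did not reach ${target_state} state in time" >&2
                return 1
            fi
        done
        echo
    }

In the harness the kubectl call is routed through the kubectl_bin retry wrapper (sketched near the end of this excerpt); plain kubectl is used here for brevity.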
.+ [[ 82 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hZDtyHpFhd +++ mktemp ++ local LAST_ERR=/tmp/tmp.WINIEMAgww ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hZDtyHpFhd ++ cat /tmp/tmp.WINIEMAgww ++ rm /tmp/tmp.hZDtyHpFhd /tmp/tmp.WINIEMAgww ++ return 0 + [[ paused == paused ]] + echo + disable_tls some-name + local cluster_name=some-name + echo 'Disabling TLS for cluster some-name' Disabling TLS for cluster some-name + kubectl_bin patch psmdb some-name --type merge '-p={"spec": { "unsafeFlags": { "tls": true }, "tls": { "mode": "disabled" } } }' ++ mktemp + local LAST_OUT=/tmp/tmp.PMGRtLuLnJ ++ mktemp + local LAST_ERR=/tmp/tmp.6pe2G3nOW5 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch psmdb some-name --type merge '-p={"spec": { "unsafeFlags": { "tls": true }, "tls": { "mode": "disabled" } } }' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.PMGRtLuLnJ perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.6pe2G3nOW5 + rm /tmp/tmp.PMGRtLuLnJ /tmp/tmp.6pe2G3nOW5 + return 0 + unpause_cluster some-name + local cluster_name=some-name + echo 'Unpausing cluster some-name' Unpausing cluster some-name + kubectl_bin patch psmdb some-name --type merge '-p={"spec": { "pause": false } }' ++ mktemp + local LAST_OUT=/tmp/tmp.HS9iisnGnp ++ mktemp + local LAST_ERR=/tmp/tmp.8iUbvoFFkD + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch psmdb some-name --type merge '-p={"spec": { "pause": false } }' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.HS9iisnGnp perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.8iUbvoFFkD + rm /tmp/tmp.HS9iisnGnp /tmp/tmp.8iUbvoFFkD + return 0 + wait_for_cluster_state some-name ready + local cluster_name=some-name + local target_state=ready + echo -n 'Waiting for cluster to reach ready state' Waiting for cluster to reach ready state+ local timeout=0 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w7yrOh0vCx +++ mktemp ++ local LAST_ERR=/tmp/tmp.BLBdF0XLTC ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.w7yrOh0vCx ++ cat /tmp/tmp.BLBdF0XLTC ++ rm /tmp/tmp.w7yrOh0vCx /tmp/tmp.BLBdF0XLTC ++ return 0 + [[ paused == ready ]] + sleep 1 + timeout=1 + echo -n . .+ [[ 1 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5G5jqq0MZa +++ mktemp ++ local LAST_ERR=/tmp/tmp.4bYWr0IduH ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.5G5jqq0MZa ++ cat /tmp/tmp.4bYWr0IduH ++ rm /tmp/tmp.5G5jqq0MZa /tmp/tmp.4bYWr0IduH ++ return 0 + [[ paused == ready ]] + sleep 1 + timeout=2 + echo -n . 
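[editor's note] The two patches above are the substance of this step: once the cluster reports "paused", TLS is switched off, which requires explicitly allowing the unsafe configuration, and the cluster is then resumed so the operator reconciles it without TLS. The equivalent standalone commands, copied from the trace (running them outside the test harness, against your own cluster name, is an assumption):

    # Disable TLS: needs the unsafeFlags.tls escape hatch, exactly as patched in the log.
    kubectl patch psmdb some-name --type merge \
        -p '{"spec": { "unsafeFlags": { "tls": true }, "tls": { "mode": "disabled" } } }'

    # Resume the cluster; the harness then waits for it to reach the "ready" state.
    kubectl patch psmdb some-name --type merge \
        -p '{"spec": { "pause": false } }'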
.+ [[ 2 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xjjgHIkT2M +++ mktemp ++ local LAST_ERR=/tmp/tmp.ElUfkUuX9f ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xjjgHIkT2M ++ cat /tmp/tmp.ElUfkUuX9f ++ rm /tmp/tmp.xjjgHIkT2M /tmp/tmp.ElUfkUuX9f ++ return 0 + [[ paused == ready ]] + sleep 1 + timeout=3 + echo -n . .+ [[ 3 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VAFV7qJAa1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.teo1ILRTOT ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.VAFV7qJAa1 ++ cat /tmp/tmp.teo1ILRTOT ++ rm /tmp/tmp.VAFV7qJAa1 /tmp/tmp.teo1ILRTOT ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=4 + echo -n . .+ [[ 4 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RBFsznqtMi +++ mktemp ++ local LAST_ERR=/tmp/tmp.aeY1oCohI1 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.RBFsznqtMi ++ cat /tmp/tmp.aeY1oCohI1 ++ rm /tmp/tmp.RBFsznqtMi /tmp/tmp.aeY1oCohI1 ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=5 + echo -n . .+ [[ 5 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PoNruuV5ci +++ mktemp ++ local LAST_ERR=/tmp/tmp.8Du6lD8NoK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.PoNruuV5ci ++ cat /tmp/tmp.8Du6lD8NoK ++ rm /tmp/tmp.PoNruuV5ci /tmp/tmp.8Du6lD8NoK ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=6 + echo -n . .+ [[ 6 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.exHmMwAQjD +++ mktemp ++ local LAST_ERR=/tmp/tmp.m1XehK468O ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.exHmMwAQjD ++ cat /tmp/tmp.m1XehK468O ++ rm /tmp/tmp.exHmMwAQjD /tmp/tmp.m1XehK468O ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=7 + echo -n . .+ [[ 7 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1RX5KsKUQ7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.3a8aBr7bS4 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.1RX5KsKUQ7 ++ cat /tmp/tmp.3a8aBr7bS4 ++ rm /tmp/tmp.1RX5KsKUQ7 /tmp/tmp.3a8aBr7bS4 ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=8 + echo -n . 
.+ [[ 8 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.N0V6teCj0l +++ mktemp ++ local LAST_ERR=/tmp/tmp.KgqnfgVPSl ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.N0V6teCj0l ++ cat /tmp/tmp.KgqnfgVPSl ++ rm /tmp/tmp.N0V6teCj0l /tmp/tmp.KgqnfgVPSl ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=9 + echo -n . .+ [[ 9 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZlICPt64XS +++ mktemp ++ local LAST_ERR=/tmp/tmp.vnIASERaiZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ZlICPt64XS ++ cat /tmp/tmp.vnIASERaiZ ++ rm /tmp/tmp.ZlICPt64XS /tmp/tmp.vnIASERaiZ ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=10 + echo -n . .+ [[ 10 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FH0g3nlvbV +++ mktemp ++ local LAST_ERR=/tmp/tmp.h9B1pogmPO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.FH0g3nlvbV ++ cat /tmp/tmp.h9B1pogmPO ++ rm /tmp/tmp.FH0g3nlvbV /tmp/tmp.h9B1pogmPO ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=11 + echo -n . .+ [[ 11 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1xASsmPI0D +++ mktemp ++ local LAST_ERR=/tmp/tmp.PqWkygPna1 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.1xASsmPI0D ++ cat /tmp/tmp.PqWkygPna1 ++ rm /tmp/tmp.1xASsmPI0D /tmp/tmp.PqWkygPna1 ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=12 + echo -n . .+ [[ 12 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZlH6s31fnw +++ mktemp ++ local LAST_ERR=/tmp/tmp.idvSOpVkQN ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ZlH6s31fnw ++ cat /tmp/tmp.idvSOpVkQN ++ rm /tmp/tmp.ZlH6s31fnw /tmp/tmp.idvSOpVkQN ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=13 + echo -n . .+ [[ 13 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tlpUDwyeQ0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.FqWNjbpuP9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.tlpUDwyeQ0 ++ cat /tmp/tmp.FqWNjbpuP9 ++ rm /tmp/tmp.tlpUDwyeQ0 /tmp/tmp.FqWNjbpuP9 ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=14 + echo -n . 
.+ [[ 14 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NgUI8lkW62 +++ mktemp ++ local LAST_ERR=/tmp/tmp.V2q1ziF8EE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.NgUI8lkW62 ++ cat /tmp/tmp.V2q1ziF8EE ++ rm /tmp/tmp.NgUI8lkW62 /tmp/tmp.V2q1ziF8EE ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=15 + echo -n . .+ [[ 15 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LGliaaDXyc +++ mktemp ++ local LAST_ERR=/tmp/tmp.dF0jf3mLxy ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LGliaaDXyc ++ cat /tmp/tmp.dF0jf3mLxy ++ rm /tmp/tmp.LGliaaDXyc /tmp/tmp.dF0jf3mLxy ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=16 + echo -n . .+ [[ 16 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Migai3avpQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.pUHIHnqC9i ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Migai3avpQ ++ cat /tmp/tmp.pUHIHnqC9i ++ rm /tmp/tmp.Migai3avpQ /tmp/tmp.pUHIHnqC9i ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=17 + echo -n . .+ [[ 17 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.u8xYREja1K +++ mktemp ++ local LAST_ERR=/tmp/tmp.yh4xDz8RcW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.u8xYREja1K ++ cat /tmp/tmp.yh4xDz8RcW ++ rm /tmp/tmp.u8xYREja1K /tmp/tmp.yh4xDz8RcW ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=18 + echo -n . .+ [[ 18 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FdcrLRim4s +++ mktemp ++ local LAST_ERR=/tmp/tmp.nf7PGcVufW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.FdcrLRim4s ++ cat /tmp/tmp.nf7PGcVufW ++ rm /tmp/tmp.FdcrLRim4s /tmp/tmp.nf7PGcVufW ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=19 + echo -n . .+ [[ 19 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rIz7xMImzF +++ mktemp ++ local LAST_ERR=/tmp/tmp.dDdJEhBWJC ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.rIz7xMImzF ++ cat /tmp/tmp.dDdJEhBWJC ++ rm /tmp/tmp.rIz7xMImzF /tmp/tmp.dDdJEhBWJC ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=20 + echo -n . 
.+ [[ 20 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.f3qWJWhGAk +++ mktemp ++ local LAST_ERR=/tmp/tmp.1VXMHE4XnN ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.f3qWJWhGAk ++ cat /tmp/tmp.1VXMHE4XnN ++ rm /tmp/tmp.f3qWJWhGAk /tmp/tmp.1VXMHE4XnN ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=21 + echo -n . .+ [[ 21 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RSvecO47NK +++ mktemp ++ local LAST_ERR=/tmp/tmp.wWTQ3ExkiU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.RSvecO47NK ++ cat /tmp/tmp.wWTQ3ExkiU ++ rm /tmp/tmp.RSvecO47NK /tmp/tmp.wWTQ3ExkiU ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=22 + echo -n . .+ [[ 22 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cllbIgzYe2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Rsae0uPIYk ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.cllbIgzYe2 ++ cat /tmp/tmp.Rsae0uPIYk ++ rm /tmp/tmp.cllbIgzYe2 /tmp/tmp.Rsae0uPIYk ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=23 + echo -n . .+ [[ 23 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fjkkoHL350 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ibWTEPyPOB ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.fjkkoHL350 ++ cat /tmp/tmp.ibWTEPyPOB ++ rm /tmp/tmp.fjkkoHL350 /tmp/tmp.ibWTEPyPOB ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=24 + echo -n . .+ [[ 24 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eYpf4PG0Oc +++ mktemp ++ local LAST_ERR=/tmp/tmp.xjEpNS8ZMY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.eYpf4PG0Oc ++ cat /tmp/tmp.xjEpNS8ZMY ++ rm /tmp/tmp.eYpf4PG0Oc /tmp/tmp.xjEpNS8ZMY ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=25 + echo -n . .+ [[ 25 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XiPtk758iU +++ mktemp ++ local LAST_ERR=/tmp/tmp.UXEpfM36OG ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.XiPtk758iU ++ cat /tmp/tmp.UXEpfM36OG ++ rm /tmp/tmp.XiPtk758iU /tmp/tmp.UXEpfM36OG ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=26 + echo -n . 
.+ [[ 26 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iDIgZky64e +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ggg7VMalfQ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.iDIgZky64e ++ cat /tmp/tmp.Ggg7VMalfQ ++ rm /tmp/tmp.iDIgZky64e /tmp/tmp.Ggg7VMalfQ ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=27 + echo -n .
[.. the same wait iteration repeats once per second, with only the timeout counter and the mktemp file names changing; .status.state stays "initializing" until the counter reaches timeout=68 ..]
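The dots above are a generic wait-for-ready poll: each pass reads the PerconaServerMongoDB resource's .status.state through the kubectl_bin retry wrapper (which captures stdout and stderr into mktemp files and retries the kubectl call up to three times), compares the state to "ready", sleeps one second, and gives up once the counter passes 1500. A minimal sketch of that polling logic, assuming an illustrative function name and error message that are not taken from the test suite:

#!/bin/bash
# Sketch of the wait loop traced above. Only the observable behaviour is taken
# from the log: check the 1500-pass cap, read .status.state via jsonpath,
# compare to "ready", sleep 1 second, print a dot per pass. The function name
# is an assumption.
wait_cluster_ready() {
    local cluster=$1
    local timeout=0

    while true; do
        if [[ ${timeout} -gt 1500 ]]; then
            echo "cluster ${cluster} did not reach ready state in time" >&2
            return 1
        fi
        local state
        state=$(kubectl get psmdb "${cluster}" -o 'jsonpath={.status.state}')
        if [[ ${state} == "ready" ]]; then
            echo
            return 0
        fi
        sleep 1
        timeout=$((timeout + 1))
        echo -n .
    done
}

wait_cluster_ready some-name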
.+ [[ 68 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iXeUAscjaE +++ mktemp ++ local LAST_ERR=/tmp/tmp.OoFDO4CtMC ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.iXeUAscjaE ++ cat /tmp/tmp.OoFDO4CtMC ++ rm /tmp/tmp.iXeUAscjaE /tmp/tmp.OoFDO4CtMC ++ return 0 + [[ ready == ready ]] + echo + compare_kubectl statefulset/some-name-rs0 -tls-disabled + local resource=statefulset/some-name-rs0 + local postfix=-tls-disabled + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-rs0-tls-disabled.yml + local new_result=/tmp/tmp.jTdR9s6bFl/statefulset_some-name-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-rs0-tls-disabled-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-rs0 + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-30987", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.iz2MNtJo59 ++ mktemp + local LAST_ERR=/tmp/tmp.VnkuR8CIEE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/some-name-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.iz2MNtJo59 + cat /tmp/tmp.VnkuR8CIEE + rm /tmp/tmp.iz2MNtJo59 /tmp/tmp.VnkuR8CIEE + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.jTdR9s6bFl/statefulset_some-name-rs0.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.jTdR9s6bFl/statefulset_some-name-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.jTdR9s6bFl/statefulset_some-name-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-rs0-tls-disabled.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-rs0-tls-disabled.yml /tmp/tmp.jTdR9s6bFl/statefulset_some-name-rs0.yml + compare_kubectl statefulset/some-name-cfg -tls-disabled + local resource=statefulset/some-name-cfg + local postfix=-tls-disabled + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-cfg-tls-disabled.yml + local new_result=/tmp/tmp.jTdR9s6bFl/statefulset_some-name-cfg.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-cfg-tls-disabled-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-cfg + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. 
| select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-30987", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.NNZZnH4zSP ++ mktemp + local LAST_ERR=/tmp/tmp.gW6XxZ2JSD + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/some-name-cfg + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.NNZZnH4zSP + cat /tmp/tmp.gW6XxZ2JSD + rm /tmp/tmp.NNZZnH4zSP /tmp/tmp.gW6XxZ2JSD + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.jTdR9s6bFl/statefulset_some-name-cfg.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.jTdR9s6bFl/statefulset_some-name-cfg.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.jTdR9s6bFl/statefulset_some-name-cfg.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-cfg-tls-disabled.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-cfg-tls-disabled.yml /tmp/tmp.jTdR9s6bFl/statefulset_some-name-cfg.yml + compare_kubectl statefulset/some-name-mongos -tls-disabled + local resource=statefulset/some-name-mongos + local postfix=-tls-disabled + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-mongos-tls-disabled.yml + local new_result=/tmp/tmp.jTdR9s6bFl/statefulset_some-name-mongos.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-mongos-tls-disabled-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-mongos + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. 
| select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-30987", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.61VqbL7rN9 ++ mktemp + local LAST_ERR=/tmp/tmp.RzV35tMfSx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/some-name-mongos + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.61VqbL7rN9 + cat /tmp/tmp.RzV35tMfSx + rm /tmp/tmp.61VqbL7rN9 /tmp/tmp.RzV35tMfSx + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.jTdR9s6bFl/statefulset_some-name-mongos.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.jTdR9s6bFl/statefulset_some-name-mongos.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.jTdR9s6bFl/statefulset_some-name-mongos.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-mongos-tls-disabled.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-mongos-tls-disabled.yml /tmp/tmp.jTdR9s6bFl/statefulset_some-name-mongos.yml --- /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-mongos-tls-disabled.yml 2024-07-09 07:18:03.189919407 +0000 +++ /tmp/tmp.jTdR9s6bFl/statefulset_some-name-mongos.yml 2024-07-09 08:25:04.435921918 +0000 @@ -2,7 +2,7 @@ kind: StatefulSet metadata: annotations: {} - generation: 7 + generation: 6 labels: app.kubernetes.io/component: mongos app.kubernetes.io/instance: some-name