Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/logs/tls-issue-cert-manager.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + main + create_infra tls-issue-cert-manager-1728 + local ns=tls-issue-cert-manager-1728 + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.ubuzdPFyNp ++ mktemp + local LAST_ERR=/tmp/tmp.1sTgSXbg7G + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ubuzdPFyNp customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.1sTgSXbg7G + rm /tmp/tmp.ubuzdPFyNp /tmp/tmp.1sTgSXbg7G + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/deploy/crd.yaml ++ grep -v '\-\-\-' + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.9N6HZm8U9G ++ mktemp + local LAST_ERR=/tmp/tmp.RMjJOBVQjH + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9N6HZm8U9G + cat /tmp/tmp.RMjJOBVQjH + rm /tmp/tmp.9N6HZm8U9G /tmp/tmp.RMjJOBVQjH + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource 
type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.8QK72EHojU ++ mktemp + local LAST_ERR=/tmp/tmp.dI9zCG5FH9 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.8QK72EHojU + cat /tmp/tmp.dI9zCG5FH9 + rm /tmp/tmp.8QK72EHojU /tmp/tmp.dI9zCG5FH9 + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.hIN7TorkYx ++ mktemp + local LAST_ERR=/tmp/tmp.X502A4tH56 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.hIN7TorkYx + cat /tmp/tmp.X502A4tH56 + rm /tmp/tmp.hIN7TorkYx /tmp/tmp.X502A4tH56 + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.nG1nZWJR9x ++ mktemp + local LAST_ERR=/tmp/tmp.MNh9GqLJja + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.nG1nZWJR9x clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.MNh9GqLJja + rm /tmp/tmp.nG1nZWJR9x /tmp/tmp.MNh9GqLJja + return 0 + check_crd_for_deletion PR-1581-772033cf + local git_tag=PR-1581-772033cf ++ curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/PR-1581-772033cf/deploy/crd.yaml ++ yq eval .metadata.name ++ /usr/bin/sed s/---//g ++ /usr/bin/sed ':a;N;$!ba;s/\n/ /g' + for crd_name in '$(curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml | yq eval '\''.metadata.name'\'' | $sed '\''s/---//g'\'' | $sed '\'':a;N;$!ba;s/\n/ /g'\'')' ++ kubectl_bin get crd/null -o 'jsonpath={.status.conditions[-1].type}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EguuO1Buyg +++ mktemp ++ local LAST_ERR=/tmp/tmp.qqzuvSSRDs ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.EguuO1Buyg ++ cat /tmp/tmp.qqzuvSSRDs Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 0 ++ for i in '$(seq 0 2)' ++ 
set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.EguuO1Buyg ++ cat /tmp/tmp.qqzuvSSRDs Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 4 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.EguuO1Buyg ++ cat /tmp/tmp.qqzuvSSRDs Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 8 ++ cat /tmp/tmp.EguuO1Buyg ++ cat /tmp/tmp.qqzuvSSRDs Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ rm /tmp/tmp.EguuO1Buyg /tmp/tmp.qqzuvSSRDs ++ return 1 + [[ '' == \T\e\r\m\i\n\a\t\i\n\g ]] + '[' -n psmdb-operator ']' + create_namespace psmdb-operator + local namespace=psmdb-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + awk '{print$1}' + '[' -n '' ']' ++ mktemp + desc 'cleaned up old namespaces psmdb-operator' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace psmdb-operator --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.PPhZNynSU3 + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' ++ mktemp + local LAST_OUT=/tmp/tmp.Yi7C73ZTm8 + local LAST_ERR=/tmp/tmp.U6kuQ2pYah + local exit_status=0 + local timeout=4 ++ mktemp + local 
LAST_ERR=/tmp/tmp.WYXR6LMXHd + local exit_status=0 + local timeout=4 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace psmdb-operator --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.PPhZNynSU3 + cat /tmp/tmp.U6kuQ2pYah + rm /tmp/tmp.PPhZNynSU3 /tmp/tmp.U6kuQ2pYah + return 0 namespace "cert-manager" deleted namespace "tls-issue-cert-manager-29881" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Yi7C73ZTm8 namespace "psmdb-operator" deleted + cat /tmp/tmp.WYXR6LMXHd + rm /tmp/tmp.Yi7C73ZTm8 /tmp/tmp.WYXR6LMXHd + return 0 + kubectl_bin wait --for=delete namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.dtZeNSSbbC ++ mktemp + local LAST_ERR=/tmp/tmp.b6FnW23qpr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.dtZeNSSbbC + cat /tmp/tmp.b6FnW23qpr + rm /tmp/tmp.dtZeNSSbbC /tmp/tmp.b6FnW23qpr + return 0 + desc 'create namespace psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.4hBY460ZCa ++ mktemp + local LAST_ERR=/tmp/tmp.g2ZySpVfTO + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.4hBY460ZCa namespace/psmdb-operator created + cat /tmp/tmp.g2ZySpVfTO + rm /tmp/tmp.4hBY460ZCa /tmp/tmp.g2ZySpVfTO + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.qR9evkMgtf +++ mktemp ++ local LAST_ERR=/tmp/tmp.lS6xppgAg9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.qR9evkMgtf ++ cat /tmp/tmp.lS6xppgAg9 ++ rm /tmp/tmp.qR9evkMgtf /tmp/tmp.lS6xppgAg9 ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1581-772033cf-1-cluster8 --namespace=psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.zdXut0vaFt ++ mktemp + local LAST_ERR=/tmp/tmp.1aZk2vgL6v + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1581-772033cf-1-cluster8 --namespace=psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.zdXut0vaFt Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1581-772033cf-1-cluster8" modified. 
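[editor's note] The pattern that repeats for every kubectl call in this log (mktemp a LAST_OUT/LAST_ERR pair, retry up to three times with backoff, dump both streams, clean up, return the last exit code) is the `kubectl_bin` wrapper. Below is a minimal sketch reconstructed purely from the xtrace output above; the canonical helper lives in e2e-tests/functions and may differ in details such as the retry guard (seen as `-a -n 1` in the trace) and the stderr redirection, which xtrace does not show.

```bash
# Sketch of the kubectl_bin retry wrapper, reconstructed from the trace above.
# Assumption: the guard seen as '-a -n 1' is modelled as a "retries enabled"
# flag; the real helper may differ.
kubectl_bin() {
    local LAST_OUT LAST_ERR
    LAST_OUT=$(mktemp)   # captures stdout of the wrapped kubectl call
    LAST_ERR=$(mktemp)   # captures stderr of the wrapped kubectl call
    local exit_status=0
    local timeout=4
    local retries_enabled=1

    for i in $(seq 0 2); do                        # up to three attempts
        set +e                                     # tolerate a failing kubectl
        kubectl "$@" 1>"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ "$exit_status" != 0 -a -n "$retries_enabled" ]; then
            cat "$LAST_OUT"
            cat "$LAST_ERR" >&2
            sleep $((timeout * i))                 # 0s, 4s, 8s backoff, matching the trace
        else
            break                                  # success: stop retrying
        fi
    done

    cat "$LAST_OUT"                                # surface captured output either way
    cat "$LAST_ERR" >&2
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}
```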
+ cat /tmp/tmp.1aZk2vgL6v + rm /tmp/tmp.zdXut0vaFt /tmp/tmp.1aZk2vgL6v + return 0 + deploy_operator + desc 'start PSMDB operator' + set +o xtrace ----------------------------------------------------------------------------------- start PSMDB operator ----------------------------------------------------------------------------------- + local cr_file + '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/crd.yaml ']' + cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/deploy/crd.yaml + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.r5dhHixV6m ++ mktemp + local LAST_ERR=/tmp/tmp.Zsk98w6ff6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.r5dhHixV6m customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied + cat /tmp/tmp.Zsk98w6ff6 + rm /tmp/tmp.r5dhHixV6m /tmp/tmp.Zsk98w6ff6 + return 0 + '[' -n psmdb-operator ']' + apply_rbac cw-rbac + local operator_namespace=psmdb-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: psmdb-operator^' + kubectl_bin apply -n psmdb-operator -f - ++ mktemp + local LAST_OUT=/tmp/tmp.sqDc1kCLwt ++ mktemp + local LAST_ERR=/tmp/tmp.eqOkBNlYlI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.sqDc1kCLwt clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created + cat /tmp/tmp.eqOkBNlYlI + rm /tmp/tmp.sqDc1kCLwt /tmp/tmp.eqOkBNlYlI + return 0 + yq eval ' (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-1581-772033cf") | ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") | ((.. 
| select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/deploy/cw-operator.yaml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.0pL4pnm9mU ++ mktemp + local LAST_ERR=/tmp/tmp.3x1w98nPdK + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.0pL4pnm9mU deployment.apps/percona-server-mongodb-operator created + cat /tmp/tmp.3x1w98nPdK + rm /tmp/tmp.0pL4pnm9mU /tmp/tmp.3x1w98nPdK + return 0 + sleep 2 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.szx8HduITS +++ mktemp ++ local LAST_ERR=/tmp/tmp.TNkqAjU1aC ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.szx8HduITS ++ cat /tmp/tmp.TNkqAjU1aC ++ rm /tmp/tmp.szx8HduITS /tmp/tmp.TNkqAjU1aC ++ return 0 + wait_pod percona-server-mongodb-operator-767b698d47-cs4hd + local pod=percona-server-mongodb-operator-767b698d47-cs4hd + set +o xtrace waiting for pod/percona-server-mongodb-operator-767b698d47-cs4hd to be ready.OK + create_namespace tls-issue-cert-manager-1728 + local namespace=tls-issue-cert-manager-1728 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' + '[' -n '' 
']' + xargs kubectl delete ns + desc 'cleaned up old namespaces tls-issue-cert-manager-1728' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces tls-issue-cert-manager-1728 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace tls-issue-cert-manager-1728 --ignore-not-found + awk '{print$1}' ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.WBdmOicECB ++ mktemp + local LAST_ERR=/tmp/tmp.cem1lGjtCs + local exit_status=0 + local timeout=4 + local LAST_OUT=/tmp/tmp.Tsd8V0PK6F ++ seq 0 2 ++ mktemp + for i in '$(seq 0 2)' + set +e + kubectl delete namespace tls-issue-cert-manager-1728 --ignore-not-found + local LAST_ERR=/tmp/tmp.IjaamOHjBZ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Tsd8V0PK6F + cat /tmp/tmp.IjaamOHjBZ + rm /tmp/tmp.Tsd8V0PK6F /tmp/tmp.IjaamOHjBZ + return 0 error: resource(s) were provided, but no name was specified + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.WBdmOicECB + cat /tmp/tmp.cem1lGjtCs + rm /tmp/tmp.WBdmOicECB /tmp/tmp.cem1lGjtCs + return 0 + kubectl_bin wait --for=delete namespace tls-issue-cert-manager-1728 ++ mktemp + local LAST_OUT=/tmp/tmp.6FCYFjL2Gz ++ mktemp + local LAST_ERR=/tmp/tmp.UIg7Bai0Kl + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete namespace tls-issue-cert-manager-1728 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6FCYFjL2Gz + cat /tmp/tmp.UIg7Bai0Kl + rm /tmp/tmp.6FCYFjL2Gz /tmp/tmp.UIg7Bai0Kl + return 0 + desc 'create namespace tls-issue-cert-manager-1728' + set +o xtrace ----------------------------------------------------------------------------------- create namespace tls-issue-cert-manager-1728 ----------------------------------------------------------------------------------- + kubectl_bin create namespace tls-issue-cert-manager-1728 ++ mktemp + local LAST_OUT=/tmp/tmp.1aJPkD0XBH ++ mktemp + local LAST_ERR=/tmp/tmp.k651lbiiWi + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace tls-issue-cert-manager-1728 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.1aJPkD0XBH namespace/tls-issue-cert-manager-1728 created + cat /tmp/tmp.k651lbiiWi + rm /tmp/tmp.1aJPkD0XBH /tmp/tmp.k651lbiiWi + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.g3v5Kq24Yt +++ mktemp ++ local LAST_ERR=/tmp/tmp.UQjZjq3RcM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.g3v5Kq24Yt ++ cat /tmp/tmp.UQjZjq3RcM ++ rm /tmp/tmp.g3v5Kq24Yt /tmp/tmp.UQjZjq3RcM ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1581-772033cf-1-cluster8 --namespace=tls-issue-cert-manager-1728 ++ mktemp + local LAST_OUT=/tmp/tmp.sINQYDGice ++ mktemp + local LAST_ERR=/tmp/tmp.VsQkQJBL45 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1581-772033cf-1-cluster8 --namespace=tls-issue-cert-manager-1728 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.sINQYDGice Context 
"gke_cloud-dev-112233_us-central1-a_jen-psmdb-1581-772033cf-1-cluster8" modified. + cat /tmp/tmp.VsQkQJBL45 + rm /tmp/tmp.sINQYDGice /tmp/tmp.VsQkQJBL45 + return 0 + deploy_cert_manager + desc 'deploy cert manager' + set +o xtrace ----------------------------------------------------------------------------------- deploy cert manager ----------------------------------------------------------------------------------- + kubectl_bin create namespace cert-manager ++ mktemp + local LAST_OUT=/tmp/tmp.x5C9B7ZSAa ++ mktemp + local LAST_ERR=/tmp/tmp.aQDSwHiThv + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace cert-manager + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.x5C9B7ZSAa namespace/cert-manager created + cat /tmp/tmp.aQDSwHiThv + rm /tmp/tmp.x5C9B7ZSAa /tmp/tmp.aQDSwHiThv + return 0 + kubectl_bin label namespace cert-manager certmanager.k8s.io/disable-validation=true ++ mktemp + local LAST_OUT=/tmp/tmp.KdmmPZhioD ++ mktemp + local LAST_ERR=/tmp/tmp.3eV9CUVYZI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl label namespace cert-manager certmanager.k8s.io/disable-validation=true + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.KdmmPZhioD namespace/cert-manager labeled + cat /tmp/tmp.3eV9CUVYZI + rm /tmp/tmp.KdmmPZhioD /tmp/tmp.3eV9CUVYZI + return 0 + kubectl_bin apply -f https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml --validate=false ++ mktemp + local LAST_OUT=/tmp/tmp.mUV9u5v7Ml ++ mktemp + local LAST_ERR=/tmp/tmp.spRgpL4jfa + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml --validate=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.mUV9u5v7Ml namespace/cert-manager configured customresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io unchanged serviceaccount/cert-manager-cainjector created serviceaccount/cert-manager created serviceaccount/cert-manager-webhook created clusterrole.rbac.authorization.k8s.io/cert-manager-cainjector unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-cluster-view unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-view unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-edit unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged 
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews configured role.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged role.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged role.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created rolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged rolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection configured rolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created service/cert-manager created service/cert-manager-webhook created deployment.apps/cert-manager-cainjector created deployment.apps/cert-manager created deployment.apps/cert-manager-webhook created mutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured validatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured + cat /tmp/tmp.spRgpL4jfa Warning: resource namespaces/cert-manager is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
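[editor's note] The block above is the cert-manager bootstrap. As a compact reference, the steps traced here (plus the readiness wait that continues in the trace below) amount to roughly the following sketch; the helper name, version pin and flags are taken verbatim from the log, while the surrounding structure is an assumption and the canonical implementation in e2e-tests/functions may differ.

```bash
# deploy_cert_manager as reconstructed from this trace.
deploy_cert_manager() {
    kubectl_bin create namespace cert-manager
    # the cert-manager webhook is not running yet, so validation is disabled for its namespace
    kubectl_bin label namespace cert-manager certmanager.k8s.io/disable-validation=true
    # --validate=false skips client-side schema validation of the large upstream manifest
    kubectl_bin apply -f https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml --validate=false
    # wait for the controller, cainjector and webhook pods (continued in the trace below)
    kubectl_bin -n cert-manager wait pod -l app.kubernetes.io/instance=cert-manager --for=condition=ready
    sleep 120   # settle time before issuers and certificates are created
}
```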
+ rm /tmp/tmp.mUV9u5v7Ml /tmp/tmp.spRgpL4jfa + return 0 + kubectl_bin -n cert-manager wait pod -l app.kubernetes.io/instance=cert-manager --for=condition=ready ++ mktemp + local LAST_OUT=/tmp/tmp.x7zIXPv3Sv ++ mktemp + local LAST_ERR=/tmp/tmp.oTAfYdGs3o + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl -n cert-manager wait pod -l app.kubernetes.io/instance=cert-manager --for=condition=ready + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.x7zIXPv3Sv pod/cert-manager-5658d944df-j7zfr condition met pod/cert-manager-cainjector-cb99ff845-qrkfq condition met pod/cert-manager-webhook-7fd74b8dc7-dh8kk condition met + cat /tmp/tmp.oTAfYdGs3o + rm /tmp/tmp.x7zIXPv3Sv /tmp/tmp.oTAfYdGs3o + return 0 + sleep 120 + desc 'create secrets and start client' + set +o xtrace ----------------------------------------------------------------------------------- create secrets and start client ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.rVgMDNjRzo ++ mktemp + local LAST_ERR=/tmp/tmp.iQpUv9PNzv + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.rVgMDNjRzo secret/some-users created + cat /tmp/tmp.iQpUv9PNzv + rm /tmp/tmp.rVgMDNjRzo /tmp/tmp.iQpUv9PNzv + return 0 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/conf/client_with_tls.yml ++ mktemp + local LAST_OUT=/tmp/tmp.jdX5ebjCXa ++ mktemp + local LAST_ERR=/tmp/tmp.ZixBugi1PW + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/conf/client_with_tls.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.jdX5ebjCXa deployment.apps/psmdb-client created + cat /tmp/tmp.ZixBugi1PW + rm /tmp/tmp.jdX5ebjCXa /tmp/tmp.ZixBugi1PW + return 0 + desc 'create custom cert-manager issuers and certificates' + set +o xtrace ----------------------------------------------------------------------------------- create custom cert-manager issuers and certificates ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/some-name-psmdb-ca-issuer.yml ++ mktemp + local LAST_OUT=/tmp/tmp.IN8iQ4Zun7 ++ mktemp + local LAST_ERR=/tmp/tmp.OmT6HeDBTn + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/some-name-psmdb-ca-issuer.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.IN8iQ4Zun7 issuer.cert-manager.io/some-name-psmdb-ca-issuer created + cat /tmp/tmp.OmT6HeDBTn + rm /tmp/tmp.IN8iQ4Zun7 /tmp/tmp.OmT6HeDBTn + return 0 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/some-name-psmdb-issuer.yml ++ mktemp + local LAST_OUT=/tmp/tmp.DLZxsxhwfn ++ mktemp + local LAST_ERR=/tmp/tmp.nEtg8OTUKv + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f 
/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/some-name-psmdb-issuer.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DLZxsxhwfn issuer.cert-manager.io/some-name-psmdb-issuer created + cat /tmp/tmp.nEtg8OTUKv + rm /tmp/tmp.DLZxsxhwfn /tmp/tmp.nEtg8OTUKv + return 0 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/some-name-ca-cert.yml ++ mktemp + local LAST_OUT=/tmp/tmp.BiRBPjLkvF ++ mktemp + local LAST_ERR=/tmp/tmp.ljV1Ln3C97 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/some-name-ca-cert.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.BiRBPjLkvF certificate.cert-manager.io/some-name-ca-cert created + cat /tmp/tmp.ljV1Ln3C97 + rm /tmp/tmp.BiRBPjLkvF /tmp/tmp.ljV1Ln3C97 + return 0 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/some-name-ssl-internal.yml ++ mktemp + local LAST_OUT=/tmp/tmp.UAQvQL2VQF ++ mktemp + local LAST_ERR=/tmp/tmp.gRalztdKTt + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/some-name-ssl-internal.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.UAQvQL2VQF certificate.cert-manager.io/some-name-ssl-internal created + cat /tmp/tmp.gRalztdKTt + rm /tmp/tmp.UAQvQL2VQF /tmp/tmp.gRalztdKTt + return 0 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/some-name-ssl.yml ++ mktemp + local LAST_OUT=/tmp/tmp.MwpY6e8JbC ++ mktemp + local LAST_ERR=/tmp/tmp.IneGnhYG4u + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/some-name-ssl.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.MwpY6e8JbC certificate.cert-manager.io/some-name-ssl created + cat /tmp/tmp.IneGnhYG4u + rm /tmp/tmp.MwpY6e8JbC /tmp/tmp.IneGnhYG4u + return 0 + deploy_cmctl + local service_account=cmctl + /usr/bin/sed -e s/percona-server-mongodb-operator/cmctl/g /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/deploy/rbac.yaml + yq '(select(.rules).rules[] | select(contains({"apiGroups": ["cert-manager.io"]}))).resources += "certificates/status"' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.UXMzlO50M2 ++ mktemp + local LAST_ERR=/tmp/tmp.KCaxgnhy1N + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.UXMzlO50M2 role.rbac.authorization.k8s.io/cmctl created serviceaccount/cmctl created rolebinding.rbac.authorization.k8s.io/service-account-cmctl created + cat /tmp/tmp.KCaxgnhy1N + rm /tmp/tmp.UXMzlO50M2 /tmp/tmp.KCaxgnhy1N + return 0 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/conf/cmctl.yml ++ mktemp + local LAST_OUT=/tmp/tmp.TChoQE11Gx ++ mktemp + local LAST_ERR=/tmp/tmp.tozX8WrmkK + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/conf/cmctl.yml + 
exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.TChoQE11Gx deployment.apps/cmctl created + cat /tmp/tmp.tozX8WrmkK + rm /tmp/tmp.TChoQE11Gx /tmp/tmp.tozX8WrmkK + return 0 + sleep 60 + cluster=some-name + desc 'create first PSMDB cluster some-name' + set +o xtrace ----------------------------------------------------------------------------------- create first PSMDB cluster some-name ----------------------------------------------------------------------------------- + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/some-name.yml + yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod7.0"' ++ mktemp + yq eval '(.spec | select(has("pmm"))).pmm.image = "perconalab/pmm-client:dev-latest"' + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-1581-772033cf"' + yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' + yq eval '.spec.upgradeOptions.apply="Never"' + local LAST_OUT=/tmp/tmp.1CrsEyXlVv ++ mktemp + local LAST_ERR=/tmp/tmp.ayNwoX6gL9 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.1CrsEyXlVv perconaservermongodb.psmdb.percona.com/some-name created + cat /tmp/tmp.ayNwoX6gL9 + rm /tmp/tmp.1CrsEyXlVv /tmp/tmp.ayNwoX6gL9 + return 0 + desc 'check if all Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all Pods started ----------------------------------------------------------------------------------- + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready...........OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.................OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vFBsBpQeau +++ mktemp ++ local LAST_ERR=/tmp/tmp.JYYt8OY8pM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.vFBsBpQeau ++ cat /tmp/tmp.JYYt8OY8pM ++ rm /tmp/tmp.vFBsBpQeau /tmp/tmp.JYYt8OY8pM ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.............OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fuUUAj09hT +++ mktemp ++ local 
LAST_ERR=/tmp/tmp.4n1yrIG0ox ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.fuUUAj09hT ++ cat /tmp/tmp.4n1yrIG0ox ++ rm /tmp/tmp.fuUUAj09hT /tmp/tmp.4n1yrIG0ox ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness...... + wait_for_running some-name-cfg 3 false + local name=some-name-cfg + let last_pod=2 + local check_cluster_readyness=false + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=cfg + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-cfg-0 + local pod=some-name-cfg-0 + set +o xtrace waiting for pod/some-name-cfg-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-cfg-1 + local pod=some-name-cfg-1 + set +o xtrace waiting for pod/some-name-cfg-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QnwfdEmz0s +++ mktemp ++ local LAST_ERR=/tmp/tmp.XTuiZsnj7p ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.QnwfdEmz0s ++ cat /tmp/tmp.XTuiZsnj7p ++ rm /tmp/tmp.QnwfdEmz0s /tmp/tmp.XTuiZsnj7p ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-cfg-2 + local pod=some-name-cfg-2 + set +o xtrace waiting for pod/some-name-cfg-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WhjglDDHVZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.Lqus6B1Ep5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.WhjglDDHVZ ++ cat /tmp/tmp.Lqus6B1Ep5 ++ rm /tmp/tmp.WhjglDDHVZ /tmp/tmp.Lqus6B1Ep5 ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ false == \t\r\u\e ]] + wait_for_running some-name-mongos 3 + local name=some-name-mongos + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=mongos + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-mongos-0 + local pod=some-name-mongos-0 + set +o xtrace waiting for pod/some-name-mongos-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-mongos-1 + local pod=some-name-mongos-1 + set +o xtrace waiting for pod/some-name-mongos-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8gGpzBSSJU +++ mktemp ++ local LAST_ERR=/tmp/tmp.gbb80ls6cH ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8gGpzBSSJU ++ cat 
/tmp/tmp.gbb80ls6cH ++ rm /tmp/tmp.8gGpzBSSJU /tmp/tmp.gbb80ls6cH ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-mongos-2 + local pod=some-name-mongos-2 + set +o xtrace waiting for pod/some-name-mongos-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LPMREoChrM +++ mktemp ++ local LAST_ERR=/tmp/tmp.h8f5a7cOgr ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LPMREoChrM ++ cat /tmp/tmp.h8f5a7cOgr ++ rm /tmp/tmp.LPMREoChrM /tmp/tmp.h8f5a7cOgr ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + desc 'compare custom certificates and issuers' + set +o xtrace ----------------------------------------------------------------------------------- compare custom certificates and issuers ----------------------------------------------------------------------------------- + compare_kubectl certificate/some-name-ssl -custom + local resource=certificate/some-name-ssl + local postfix=-custom + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-custom.yml + local new_result=/tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-custom-oc.yml ']' + kubectl_bin get -o yaml certificate/some-name-ssl + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. 
| select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-1728", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.diOI0IMHeY ++ mktemp + local LAST_ERR=/tmp/tmp.aWbxRdMvZu + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml certificate/some-name-ssl + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.diOI0IMHeY + cat /tmp/tmp.aWbxRdMvZu + rm /tmp/tmp.diOI0IMHeY /tmp/tmp.aWbxRdMvZu + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-custom.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-custom.yml /tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl.yml + compare_kubectl certificate/some-name-ssl-internal -custom + local resource=certificate/some-name-ssl-internal + local postfix=-custom + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-internal-custom.yml + local new_result=/tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl-internal.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-internal-custom-oc.yml ']' + kubectl_bin get -o yaml certificate/some-name-ssl-internal + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. 
| select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-1728", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.rLAW4sBXOT ++ mktemp + local LAST_ERR=/tmp/tmp.gQOPNpl8Ka + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml certificate/some-name-ssl-internal + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.rLAW4sBXOT + cat /tmp/tmp.gQOPNpl8Ka + rm /tmp/tmp.rLAW4sBXOT /tmp/tmp.gQOPNpl8Ka + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl-internal.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl-internal.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl-internal.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-internal-custom.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-internal-custom.yml /tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl-internal.yml + compare_kubectl certificate/some-name-ca-cert -custom + local resource=certificate/some-name-ca-cert + local postfix=-custom + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ca-cert-custom.yml + local new_result=/tmp/tmp.pCG7nqFZEo/certificate_some-name-ca-cert.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ca-cert-custom-oc.yml ']' + kubectl_bin get -o yaml certificate/some-name-ca-cert + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. 
| select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-1728", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.U5j6AvxlRG ++ mktemp + local LAST_ERR=/tmp/tmp.Z0G0oM9b0n + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml certificate/some-name-ca-cert + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.U5j6AvxlRG + cat /tmp/tmp.Z0G0oM9b0n + rm /tmp/tmp.U5j6AvxlRG /tmp/tmp.Z0G0oM9b0n + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.pCG7nqFZEo/certificate_some-name-ca-cert.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.pCG7nqFZEo/certificate_some-name-ca-cert.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.pCG7nqFZEo/certificate_some-name-ca-cert.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ca-cert-custom.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ca-cert-custom.yml /tmp/tmp.pCG7nqFZEo/certificate_some-name-ca-cert.yml + compare_kubectl issuer/some-name-psmdb-ca-issuer -custom + local resource=issuer/some-name-psmdb-ca-issuer + local postfix=-custom + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-ca-issuer-custom.yml + local new_result=/tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-ca-issuer.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-ca-issuer-custom-oc.yml ']' + kubectl_bin get -o yaml issuer/some-name-psmdb-ca-issuer ++ mktemp + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. 
| select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-1728", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - + local LAST_OUT=/tmp/tmp.3anUwXKLav ++ mktemp + local LAST_ERR=/tmp/tmp.P9RTSM5R9i + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml issuer/some-name-psmdb-ca-issuer + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.3anUwXKLav + cat /tmp/tmp.P9RTSM5R9i + rm /tmp/tmp.3anUwXKLav /tmp/tmp.P9RTSM5R9i + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-ca-issuer.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-ca-issuer.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-ca-issuer.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-ca-issuer-custom.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-ca-issuer-custom.yml /tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-ca-issuer.yml + compare_kubectl issuer/some-name-psmdb-issuer -custom + local resource=issuer/some-name-psmdb-issuer + local postfix=-custom + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-issuer-custom.yml + local new_result=/tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-issuer.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-issuer-custom-oc.yml ']' + kubectl_bin get -o yaml issuer/some-name-psmdb-issuer + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. 
| select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-1728", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.De5lujzl1G ++ mktemp + local LAST_ERR=/tmp/tmp.Uq2kXvaC5Q + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml issuer/some-name-psmdb-issuer + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.De5lujzl1G + cat /tmp/tmp.Uq2kXvaC5Q + rm /tmp/tmp.De5lujzl1G /tmp/tmp.Uq2kXvaC5Q + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-issuer.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-issuer.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-issuer.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-issuer-custom.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-issuer-custom.yml /tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-issuer.yml + desc 'delete cluster' + set +o xtrace ----------------------------------------------------------------------------------- delete cluster ----------------------------------------------------------------------------------- + kubectl delete psmdb --all perconaservermongodb.psmdb.percona.com "some-name" deleted + kubectl delete pvc --all persistentvolumeclaim "mongod-data-some-name-cfg-0" deleted persistentvolumeclaim "mongod-data-some-name-cfg-1" deleted persistentvolumeclaim "mongod-data-some-name-cfg-2" deleted persistentvolumeclaim "mongod-data-some-name-rs0-0" deleted persistentvolumeclaim "mongod-data-some-name-rs0-1" deleted persistentvolumeclaim "mongod-data-some-name-rs0-2" deleted + desc 'delete custom cert-manager issuers and certificates' + set +o xtrace ----------------------------------------------------------------------------------- delete custom cert-manager issuers and certificates ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/some-name-psmdb-ca-issuer.yml ++ mktemp + local LAST_OUT=/tmp/tmp.v79uwdSVbQ ++ mktemp + local LAST_ERR=/tmp/tmp.LiQakVgLTk + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/some-name-psmdb-ca-issuer.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.v79uwdSVbQ issuer.cert-manager.io "some-name-psmdb-ca-issuer" deleted + cat /tmp/tmp.LiQakVgLTk + rm /tmp/tmp.v79uwdSVbQ /tmp/tmp.LiQakVgLTk + return 0 + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/some-name-psmdb-issuer.yml ++ mktemp + local LAST_OUT=/tmp/tmp.56wfSXuyk3 ++ mktemp + local LAST_ERR=/tmp/tmp.t6ZNSSAF8n + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/some-name-psmdb-issuer.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.56wfSXuyk3 issuer.cert-manager.io "some-name-psmdb-issuer" deleted + cat /tmp/tmp.t6ZNSSAF8n + rm /tmp/tmp.56wfSXuyk3 /tmp/tmp.t6ZNSSAF8n + 
return 0 + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/some-name-ca-cert.yml ++ mktemp + local LAST_OUT=/tmp/tmp.BfJnwPi12E ++ mktemp + local LAST_ERR=/tmp/tmp.HjPC7WH2rQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/some-name-ca-cert.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.BfJnwPi12E certificate.cert-manager.io "some-name-ca-cert" deleted + cat /tmp/tmp.HjPC7WH2rQ + rm /tmp/tmp.BfJnwPi12E /tmp/tmp.HjPC7WH2rQ + return 0 + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/some-name-ssl-internal.yml ++ mktemp + local LAST_OUT=/tmp/tmp.iRhBs6v0QS ++ mktemp + local LAST_ERR=/tmp/tmp.rbmDddWP3l + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/some-name-ssl-internal.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.iRhBs6v0QS certificate.cert-manager.io "some-name-ssl-internal" deleted + cat /tmp/tmp.rbmDddWP3l + rm /tmp/tmp.iRhBs6v0QS /tmp/tmp.rbmDddWP3l + return 0 + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/some-name-ssl.yml ++ mktemp + local LAST_OUT=/tmp/tmp.nbZsFrrn7d ++ mktemp + local LAST_ERR=/tmp/tmp.1LSVYYbYKQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/some-name-ssl.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.nbZsFrrn7d certificate.cert-manager.io "some-name-ssl" deleted + cat /tmp/tmp.1LSVYYbYKQ + rm /tmp/tmp.nbZsFrrn7d /tmp/tmp.1LSVYYbYKQ + return 0 + sleep 30 + desc 'delete ssl secrets, operator should recreate them' + set +o xtrace ----------------------------------------------------------------------------------- delete ssl secrets, operator should recreate them ----------------------------------------------------------------------------------- + kubectl_bin delete secret some-name-ssl-internal ++ mktemp + local LAST_OUT=/tmp/tmp.aYcKRCKsUY ++ mktemp + local LAST_ERR=/tmp/tmp.JsznzwHHfU + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete secret some-name-ssl-internal + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.aYcKRCKsUY secret "some-name-ssl-internal" deleted + cat /tmp/tmp.JsznzwHHfU + rm /tmp/tmp.aYcKRCKsUY /tmp/tmp.JsznzwHHfU + return 0 + kubectl_bin delete secret some-name-ssl ++ mktemp + local LAST_OUT=/tmp/tmp.gnu7JvfFo7 ++ mktemp + local LAST_ERR=/tmp/tmp.UXL6p6mhXN + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete secret some-name-ssl + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.gnu7JvfFo7 secret "some-name-ssl" deleted + cat /tmp/tmp.UXL6p6mhXN + rm /tmp/tmp.gnu7JvfFo7 /tmp/tmp.UXL6p6mhXN + return 0 + sleep 30 + desc 'recreate PSMDB cluster some-name' + set +o xtrace ----------------------------------------------------------------------------------- recreate PSMDB cluster some-name ----------------------------------------------------------------------------------- + apply_cluster 
/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/conf/some-name.yml ++ mktemp + yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod7.0"' + yq eval '(.spec | select(has("pmm"))).pmm.image = "perconalab/pmm-client:dev-latest"' + yq eval '.spec.upgradeOptions.apply="Never"' + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-1581-772033cf"' + local LAST_OUT=/tmp/tmp.LuSVj5QZYv + yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' ++ mktemp + local LAST_ERR=/tmp/tmp.ueXuSaPskN + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LuSVj5QZYv perconaservermongodb.psmdb.percona.com/some-name created + cat /tmp/tmp.ueXuSaPskN + rm /tmp/tmp.LuSVj5QZYv /tmp/tmp.ueXuSaPskN + return 0 + desc 'check if all Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all Pods started ----------------------------------------------------------------------------------- + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready...............OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.................OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vTjhWpN2oH +++ mktemp ++ local LAST_ERR=/tmp/tmp.WSy7dOIXXO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.vTjhWpN2oH ++ cat /tmp/tmp.WSy7dOIXXO ++ rm /tmp/tmp.vTjhWpN2oH /tmp/tmp.WSy7dOIXXO ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready...............OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LMQ9iieEQY +++ mktemp ++ local LAST_ERR=/tmp/tmp.Qa0K577hF6 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LMQ9iieEQY ++ cat /tmp/tmp.Qa0K577hF6 ++ rm /tmp/tmp.LMQ9iieEQY /tmp/tmp.Qa0K577hF6 ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster 
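
Note: apply_cluster (traced above) pipes the CR manifest through a chain of yq edits before kubectl apply, overriding the image fields for the build under test. A rough equivalent of that pipeline, with placeholder image tags instead of the real build tags, might look like this:

# Sketch: override images in the CR on the fly and apply it (placeholder tags).
cat some-name.yml \
    | yq eval '(.spec | select(.image == null)).image = "example/mongod:tag"' - \
    | yq eval '(.spec | select(has("initImage"))).initImage = "example/operator:tag"' - \
    | yq eval '.spec.upgradeOptions.apply = "Never"' - \
    | kubectl apply -f -
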
readyness......................... + wait_for_running some-name-cfg 3 false + local name=some-name-cfg + let last_pod=2 + local check_cluster_readyness=false + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=cfg + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-cfg-0 + local pod=some-name-cfg-0 + set +o xtrace waiting for pod/some-name-cfg-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-cfg-1 + local pod=some-name-cfg-1 + set +o xtrace waiting for pod/some-name-cfg-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wYlWnZLNQM +++ mktemp ++ local LAST_ERR=/tmp/tmp.B8t9w8OZvc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.wYlWnZLNQM ++ cat /tmp/tmp.B8t9w8OZvc ++ rm /tmp/tmp.wYlWnZLNQM /tmp/tmp.B8t9w8OZvc ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-cfg-2 + local pod=some-name-cfg-2 + set +o xtrace waiting for pod/some-name-cfg-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WKpjVNkZpO +++ mktemp ++ local LAST_ERR=/tmp/tmp.jQzCmskMLc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.WKpjVNkZpO ++ cat /tmp/tmp.jQzCmskMLc ++ rm /tmp/tmp.WKpjVNkZpO /tmp/tmp.jQzCmskMLc ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ false == \t\r\u\e ]] + wait_for_running some-name-mongos 3 + local name=some-name-mongos + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=mongos + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-mongos-0 + local pod=some-name-mongos-0 + set +o xtrace waiting for pod/some-name-mongos-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-mongos-1 + local pod=some-name-mongos-1 + set +o xtrace waiting for pod/some-name-mongos-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7W9C7du24f +++ mktemp ++ local LAST_ERR=/tmp/tmp.Q9NsaI77Hz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.7W9C7du24f ++ cat /tmp/tmp.Q9NsaI77Hz ++ rm /tmp/tmp.7W9C7du24f /tmp/tmp.Q9NsaI77Hz ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-mongos-2 + local pod=some-name-mongos-2 + set +o xtrace waiting for pod/some-name-mongos-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CvHonbMROO +++ mktemp ++ local LAST_ERR=/tmp/tmp.BtkZ7Z1WPf ++ local exit_status=0 ++ local 
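
Note: wait_for_running and wait_pod (traced above) boil down to polling each pod's Ready condition until it flips or a deadline passes. A bare-bones version of that poll is sketched below; the retry cap is illustrative and the real helpers do more (arbiter/non-voting checks, cluster readiness).

# Sketch: poll a pod's Ready condition (retry cap is illustrative).
wait_pod() {
    local pod=$1 retries=0
    until [ "$(kubectl get pod "$pod" \
            -o 'jsonpath={.status.conditions[?(@.type=="Ready")].status}')" = "True" ]; do
        printf '.'
        sleep 1
        retries=$((retries + 1))
        [ "$retries" -gt 360 ] && { echo "pod/$pod never became ready"; return 1; }
    done
    echo OK
}
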
timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.CvHonbMROO ++ cat /tmp/tmp.BtkZ7Z1WPf ++ rm /tmp/tmp.CvHonbMROO /tmp/tmp.BtkZ7Z1WPf ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + compare_kubectl statefulset/some-name-rs0 + local resource=statefulset/some-name-rs0 + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-rs0.yml + local new_result=/tmp/tmp.pCG7nqFZEo/statefulset_some-name-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-rs0-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-rs0 + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-1728", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.Sy6oIg5gfC ++ mktemp + local LAST_ERR=/tmp/tmp.BJnPDfH0M3 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/some-name-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Sy6oIg5gfC + cat /tmp/tmp.BJnPDfH0M3 + rm /tmp/tmp.Sy6oIg5gfC /tmp/tmp.BJnPDfH0M3 + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.pCG7nqFZEo/statefulset_some-name-rs0.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.pCG7nqFZEo/statefulset_some-name-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.pCG7nqFZEo/statefulset_some-name-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-rs0.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-rs0.yml /tmp/tmp.pCG7nqFZEo/statefulset_some-name-rs0.yml + compare_kubectl statefulset/some-name-cfg + local resource=statefulset/some-name-cfg + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-cfg.yml + local new_result=/tmp/tmp.pCG7nqFZEo/statefulset_some-name-cfg.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-cfg-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-cfg + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. 
| select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-1728", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.1Y6EgpVeFT ++ mktemp + local LAST_ERR=/tmp/tmp.mNmK3Vkwpe + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/some-name-cfg + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.1Y6EgpVeFT + cat /tmp/tmp.mNmK3Vkwpe + rm /tmp/tmp.1Y6EgpVeFT /tmp/tmp.mNmK3Vkwpe + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.pCG7nqFZEo/statefulset_some-name-cfg.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.pCG7nqFZEo/statefulset_some-name-cfg.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.pCG7nqFZEo/statefulset_some-name-cfg.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-cfg.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-cfg.yml /tmp/tmp.pCG7nqFZEo/statefulset_some-name-cfg.yml + compare_kubectl statefulset/some-name-mongos + local resource=statefulset/some-name-mongos + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-mongos.yml + local new_result=/tmp/tmp.pCG7nqFZEo/statefulset_some-name-mongos.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-mongos-oc.yml ']' + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. 
| select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-1728", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - + kubectl_bin get -o yaml statefulset/some-name-mongos ++ mktemp + local LAST_OUT=/tmp/tmp.soTbfJ3IzP ++ mktemp + local LAST_ERR=/tmp/tmp.Vc9CM3wwKn + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/some-name-mongos + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.soTbfJ3IzP + cat /tmp/tmp.Vc9CM3wwKn + rm /tmp/tmp.soTbfJ3IzP /tmp/tmp.Vc9CM3wwKn + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.pCG7nqFZEo/statefulset_some-name-mongos.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.pCG7nqFZEo/statefulset_some-name-mongos.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.pCG7nqFZEo/statefulset_some-name-mongos.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-mongos.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-mongos.yml /tmp/tmp.pCG7nqFZEo/statefulset_some-name-mongos.yml + desc 'check if certificates issued with certmanager' + set +o xtrace ----------------------------------------------------------------------------------- check if certificates issued with certmanager ----------------------------------------------------------------------------------- + check_tls_secret some-name-ssl + local secret_name=some-name-ssl + check_secret_data_key some-name-ssl ca.crt + local secret_name=some-name-ssl + local data_key=ca.crt + local secret_data ++ kubectl_bin get secrets/some-name-ssl -o json ++ jq '.data["ca.crt"]' +++ mktemp ++ local LAST_OUT=/tmp/tmp.njiFgDWl8U +++ mktemp ++ local LAST_ERR=/tmp/tmp.75tOFLHWA5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.njiFgDWl8U ++ cat /tmp/tmp.75tOFLHWA5 ++ rm /tmp/tmp.njiFgDWl8U /tmp/tmp.75tOFLHWA5 ++ return 0 + 
secret_data='"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUMrakNDQWVLZ0F3SUJBZ0lRQk9aS0xsZldkdWZ1ajhoL3A5T2dKakFOQmdrcWhraUc5dzBCQVFzRkFEQVgKTVJVd0V3WURWUVFERXd4emIyMWxMVzVoYldVdFkyRXdIaGNOTWpRd056QXhNRE16TWpBeVdoY05NalV3TnpBeApNRE16TWpBeVdqQVhNUlV3RXdZRFZRUURFd3h6YjIxbExXNWhiV1V0WTJFd2dnRWlNQTBHQ1NxR1NJYjNEUUVCCkFRVUFBNElCRHdBd2dnRUtBb0lCQVFESWp6WWxxQ0M2VExwTEVoSnVGam96UmdFZ01vK3BWWVc0aTVYMWRraGEKUTZOZitNK2hDMHlYSkVlRVFDeHZtaldjRVdyeVRXZURLQW1CdFpIY0JQNGg5dnpxQ0Qvb1F4RmtQb3pQVFIybQowWDBHT2ZqVWlIKzFObTBwc1crWE5BbVBIOGtmYk5BU3VuR3E3VTZkbDl0NEtjUFRIcHVQa09HcEVMWTV1bm1UClJldkJGL2dIY2xNMmY2ai9qVmtuRjVjMWRrcHM4MU52MzFYQ056ZTA2WlJLS2ZsWWtuajF3OEduMlRFVzlKZVYKSWdsNkhpK0ZMTW9qS3gwOE1KdWtQWDBLQ09XcEg4ZTRiVEtjTzdDeXd0S0RYcnRCVmJDeXRsY3dYYVhQMHkyVApaekFlaklxaXRFZmN0cHZacnVFNFhJVDhVaEFjdG9BeTNRNFVsaGYwUzJXWEFnTUJBQUdqUWpCQU1BNEdBMVVkCkR3RUIvd1FFQXdJQ3BEQVBCZ05WSFJNQkFmOEVCVEFEQVFIL01CMEdBMVVkRGdRV0JCUWJWQ3pjM2NsZlY5a0oKUXFBSnA5ZXdvaGlNMHpBTkJna3Foa2lHOXcwQkFRc0ZBQU9DQVFFQWYrSGV5L0hJWGZiZm8zMmlHVjBwdk5YUQp3V1RBMzNlK1lOZGdJTWZ2TFJXeWJXeS9rMTNqYitiWkg5aDlTT2hqVjlBTzA0Vlo2dEZFb01LYXFFZTdTTWpsCldyYkU1UkNxMW9sRm91TXhUd2ZnamVvcGc1MG1IdmxCeTVEUy93YXBETU92TVN6MERoUzkySVE5R3JYYnBSTDQKYlZsa1VtcDkwTDBEL1l5aEord3JLY2FoelNHRmhKWDE2Z2RETXJBZVBydEVBcWNlNjNOV294UlVZK0tlRzArUApMWXk1TCtDWEpueWdtMENoZEovamRzL3pOL09YM0Q2UXJlK2EzZUZaN3AyRUViZWROclI3eWVqbWZuaStNSVhwCkFuWXh2UCtuQ3o5aGlYaExCQU92MmhaSXIreS9jNkd4ZGtUcnl4TDU4MFVaOHJXOHJJWDdWZWthS0VkUVlBPT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo="' + '[' -z '"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUMrakNDQWVLZ0F3SUJBZ0lRQk9aS0xsZldkdWZ1ajhoL3A5T2dKakFOQmdrcWhraUc5dzBCQVFzRkFEQVgKTVJVd0V3WURWUVFERXd4emIyMWxMVzVoYldVdFkyRXdIaGNOTWpRd056QXhNRE16TWpBeVdoY05NalV3TnpBeApNRE16TWpBeVdqQVhNUlV3RXdZRFZRUURFd3h6YjIxbExXNWhiV1V0WTJFd2dnRWlNQTBHQ1NxR1NJYjNEUUVCCkFRVUFBNElCRHdBd2dnRUtBb0lCQVFESWp6WWxxQ0M2VExwTEVoSnVGam96UmdFZ01vK3BWWVc0aTVYMWRraGEKUTZOZitNK2hDMHlYSkVlRVFDeHZtaldjRVdyeVRXZURLQW1CdFpIY0JQNGg5dnpxQ0Qvb1F4RmtQb3pQVFIybQowWDBHT2ZqVWlIKzFObTBwc1crWE5BbVBIOGtmYk5BU3VuR3E3VTZkbDl0NEtjUFRIcHVQa09HcEVMWTV1bm1UClJldkJGL2dIY2xNMmY2ai9qVmtuRjVjMWRrcHM4MU52MzFYQ056ZTA2WlJLS2ZsWWtuajF3OEduMlRFVzlKZVYKSWdsNkhpK0ZMTW9qS3gwOE1KdWtQWDBLQ09XcEg4ZTRiVEtjTzdDeXd0S0RYcnRCVmJDeXRsY3dYYVhQMHkyVApaekFlaklxaXRFZmN0cHZacnVFNFhJVDhVaEFjdG9BeTNRNFVsaGYwUzJXWEFnTUJBQUdqUWpCQU1BNEdBMVVkCkR3RUIvd1FFQXdJQ3BEQVBCZ05WSFJNQkFmOEVCVEFEQVFIL01CMEdBMVVkRGdRV0JCUWJWQ3pjM2NsZlY5a0oKUXFBSnA5ZXdvaGlNMHpBTkJna3Foa2lHOXcwQkFRc0ZBQU9DQVFFQWYrSGV5L0hJWGZiZm8zMmlHVjBwdk5YUQp3V1RBMzNlK1lOZGdJTWZ2TFJXeWJXeS9rMTNqYitiWkg5aDlTT2hqVjlBTzA0Vlo2dEZFb01LYXFFZTdTTWpsCldyYkU1UkNxMW9sRm91TXhUd2ZnamVvcGc1MG1IdmxCeTVEUy93YXBETU92TVN6MERoUzkySVE5R3JYYnBSTDQKYlZsa1VtcDkwTDBEL1l5aEord3JLY2FoelNHRmhKWDE2Z2RETXJBZVBydEVBcWNlNjNOV294UlVZK0tlRzArUApMWXk1TCtDWEpueWdtMENoZEovamRzL3pOL09YM0Q2UXJlK2EzZUZaN3AyRUViZWROclI3eWVqbWZuaStNSVhwCkFuWXh2UCtuQ3o5aGlYaExCQU92MmhaSXIreS9jNkd4ZGtUcnl4TDU4MFVaOHJXOHJJWDdWZWthS0VkUVlBPT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo="' ']' + check_secret_data_key some-name-ssl tls.crt + local secret_name=some-name-ssl + local data_key=tls.crt + local secret_data ++ kubectl_bin get secrets/some-name-ssl -o json ++ jq '.data["tls.crt"]' +++ mktemp ++ local LAST_OUT=/tmp/tmp.x4oz6ErLfT +++ mktemp ++ local LAST_ERR=/tmp/tmp.zK833DVCun ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.x4oz6ErLfT ++ cat /tmp/tmp.zK833DVCun ++ rm /tmp/tmp.x4oz6ErLfT /tmp/tmp.zK833DVCun ++ 
return 0 + secret_data='"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUgxVENDQnIyZ0F3SUJBZ0lSQVBFS3FzblJTV3Vac3lsVVYzVy82SHN3RFFZSktvWklodmNOQVFFTEJRQXcKRnpFVk1CTUdBMVVFQXhNTWMyOXRaUzF1WVcxbExXTmhNQjRYRFRJME1EY3dNVEF6TXpZME5sb1hEVEkwTURreQpPVEF6TXpZME5sb3dKREVPTUF3R0ExVUVDaE1GVUZOTlJFSXhFakFRQmdOVkJBTVRDWE52YldVdGJtRnRaVENDCkFTSXdEUVlKS29aSWh2Y05BUUVCQlFBRGdnRVBBRENDQVFvQ2dnRUJBUElUTnV2amlEN0lUYUtPSjI2ZlJkRlQKemVqQ2sraGtDZ3NNS3JIdzN4T2dHWU1DVGpSb3RUSXgzQVB0TSs0Zkg3eStLZUVsVVlTcGZtVWk0TTlCM1cydwp5YWNlMCtzS05sNWVUbjhOOTZxVTJTTy9PT0hiRXc0UTk0RWZUTzNWclgyUXBNL3MzUTFjR3NHc0ViUk0yZW56ClFSUUQwNmw0Kzl2c1dZQzNzcm1sMnFjeHVpR2dsZWhUWitkd25yVTJ0QmlIVjNvaTdDT09YUE9DbkhHV3Z0eTkKU2YxL1NVYXZ0QWxWZ2cySmd5bllNRlIzeGw2alovOC9mMitRUWFwTXhWMjU0WFFzaERPdE5GbGJ4bmdWNTZMbgp3dzBmQ05Mc3ZBNmNiVlRQYTJzQ0xpd3hGQUQ4U05LUk5QWG5zek9EZURqUXkvbXc2UlF2emtLNXBPNThoWVVDCkF3RUFBYU9DQlEwd2dnVUpNQTRHQTFVZER3RUIvd1FFQXdJRm9EQU1CZ05WSFJNQkFmOEVBakFBTUI4R0ExVWQKSXdRWU1CYUFGQnRVTE56ZHlWOVgyUWxDb0FtbjE3Q2lHSXpUTUlJRXhnWURWUjBSQklJRXZUQ0NCTG1DQ1d4dgpZMkZzYUc5emRJSU5jMjl0WlMxdVlXMWxMWEp6TUlJcGMyOXRaUzF1WVcxbExYSnpNQzUwYkhNdGFYTnpkV1V0ClkyVnlkQzF0WVc1aFoyVnlMVEUzTWppQ08zTnZiV1V0Ym1GdFpTMXljekF1ZEd4ekxXbHpjM1ZsTFdObGNuUXQKYldGdVlXZGxjaTB4TnpJNExuTjJZeTVqYkhWemRHVnlMbXh2WTJGc2dnOHFMbk52YldVdGJtRnRaUzF5Y3pDQwpLeW91YzI5dFpTMXVZVzFsTFhKek1DNTBiSE10YVhOemRXVXRZMlZ5ZEMxdFlXNWhaMlZ5TFRFM01qaUNQU291CmMyOXRaUzF1WVcxbExYSnpNQzUwYkhNdGFYTnpkV1V0WTJWeWRDMXRZVzVoWjJWeUxURTNNamd1YzNaakxtTnMKZFhOMFpYSXViRzlqWVd5Q1BuTnZiV1V0Ym1GdFpTMXljekF1ZEd4ekxXbHpjM1ZsTFdObGNuUXRiV0Z1WVdkbApjaTB4TnpJNExuTjJZeTVqYkhWemRHVnljMlYwTG14dlkyRnNna0FxTG5OdmJXVXRibUZ0WlMxeWN6QXVkR3h6CkxXbHpjM1ZsTFdObGNuUXRiV0Z1WVdkbGNpMHhOekk0TG5OMll5NWpiSFZ6ZEdWeWMyVjBMbXh2WTJGc2dqSXEKTG5Sc2N5MXBjM04xWlMxalpYSjBMVzFoYm1GblpYSXRNVGN5T0M1emRtTXVZMngxYzNSbGNuTmxkQzVzYjJOaApiSUlRYzI5dFpTMXVZVzFsTFcxdmJtZHZjNElzYzI5dFpTMXVZVzFsTFcxdmJtZHZjeTUwYkhNdGFYTnpkV1V0ClkyVnlkQzF0WVc1aFoyVnlMVEUzTWppQ1BuTnZiV1V0Ym1GdFpTMXRiMjVuYjNNdWRHeHpMV2x6YzNWbExXTmwKY25RdGJXRnVZV2RsY2kweE56STRMbk4yWXk1amJIVnpkR1Z5TG14dlkyRnNnaElxTG5OdmJXVXRibUZ0WlMxdApiMjVuYjNPQ0xpb3VjMjl0WlMxdVlXMWxMVzF2Ym1kdmN5NTBiSE10YVhOemRXVXRZMlZ5ZEMxdFlXNWhaMlZ5CkxURTNNamlDUUNvdWMyOXRaUzF1WVcxbExXMXZibWR2Y3k1MGJITXRhWE56ZFdVdFkyVnlkQzF0WVc1aFoyVnkKTFRFM01qZ3VjM1pqTG1Oc2RYTjBaWEl1Ykc5allXeUNEWE52YldVdGJtRnRaUzFqWm1lQ0tYTnZiV1V0Ym1GdApaUzFqWm1jdWRHeHpMV2x6YzNWbExXTmxjblF0YldGdVlXZGxjaTB4TnpJNGdqdHpiMjFsTFc1aGJXVXRZMlpuCkxuUnNjeTFwYzNOMVpTMWpaWEowTFcxaGJtRm5aWEl0TVRjeU9DNXpkbU11WTJ4MWMzUmxjaTVzYjJOaGJJSVAKS2k1emIyMWxMVzVoYldVdFkyWm5naXNxTG5OdmJXVXRibUZ0WlMxalptY3VkR3h6TFdsemMzVmxMV05sY25RdApiV0Z1WVdkbGNpMHhOekk0Z2owcUxuTnZiV1V0Ym1GdFpTMWpabWN1ZEd4ekxXbHpjM1ZsTFdObGNuUXRiV0Z1CllXZGxjaTB4TnpJNExuTjJZeTVqYkhWemRHVnlMbXh2WTJGc2drRnpiMjFsTFc1aGJXVXRiVzl1WjI5ekxuUnMKY3kxcGMzTjFaUzFqWlhKMExXMWhibUZuWlhJdE1UY3lPQzV6ZG1NdVkyeDFjM1JsY25ObGRDNXNiMk5oYklKRApLaTV6YjIxbExXNWhiV1V0Ylc5dVoyOXpMblJzY3kxcGMzTjFaUzFqWlhKMExXMWhibUZuWlhJdE1UY3lPQzV6CmRtTXVZMngxYzNSbGNuTmxkQzVzYjJOaGJJSStjMjl0WlMxdVlXMWxMV05tWnk1MGJITXRhWE56ZFdVdFkyVnkKZEMxdFlXNWhaMlZ5TFRFM01qZ3VjM1pqTG1Oc2RYTjBaWEp6WlhRdWJHOWpZV3lDUUNvdWMyOXRaUzF1WVcxbApMV05tWnk1MGJITXRhWE56ZFdVdFkyVnlkQzF0WVc1aFoyVnlMVEUzTWpndWMzWmpMbU5zZFhOMFpYSnpaWFF1CmJHOWpZV3d3RFFZSktvWklodmNOQVFFTEJRQURnZ0VCQUxKRWhXSDd0aFJMbms0NzlJaFRRa24xenhGUXZSNzMKZlpYT3loT3J5Q0Nod3FDL29QdHpLNWM1NW5HMncrU0k3VWZmVVJlMnUvY1dRSTJINFc3Q2prYXhvbllJaFNWTwpZU3dBWEY4bFR5ODZjTDZQeHlXL1Z0Y0tUUW9NTzNYeDJNN3UyTExzL0wyWFBWRCtua1AwM1EweWF1V3RXektiCmppRFVFdTY3bjI3Q0MrSENUWWNzcWRtbG0xVU82Y3dDYzc2RndOdUtrVGhHaVZzTlNsL2F6Z2hJUGZMN20weG4KbEY2NjVZbG1RZFJ3Yk9INi9ien
NGdDNDKzZrNGRINmkwOFFSRnhJK0EzOW5KVXZVb2k2bDhvOW81QVgxaHVaMwpNV1JkeExvS2UrSWo4aElHQVY0Ym9ncUM2RTd4ZlpLdHhGME9WUG43aE15UG9oeU9lUkh3bFhBPQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg=="' + '[' -z '"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUgxVENDQnIyZ0F3SUJBZ0lSQVBFS3FzblJTV3Vac3lsVVYzVy82SHN3RFFZSktvWklodmNOQVFFTEJRQXcKRnpFVk1CTUdBMVVFQXhNTWMyOXRaUzF1WVcxbExXTmhNQjRYRFRJME1EY3dNVEF6TXpZME5sb1hEVEkwTURreQpPVEF6TXpZME5sb3dKREVPTUF3R0ExVUVDaE1GVUZOTlJFSXhFakFRQmdOVkJBTVRDWE52YldVdGJtRnRaVENDCkFTSXdEUVlKS29aSWh2Y05BUUVCQlFBRGdnRVBBRENDQVFvQ2dnRUJBUElUTnV2amlEN0lUYUtPSjI2ZlJkRlQKemVqQ2sraGtDZ3NNS3JIdzN4T2dHWU1DVGpSb3RUSXgzQVB0TSs0Zkg3eStLZUVsVVlTcGZtVWk0TTlCM1cydwp5YWNlMCtzS05sNWVUbjhOOTZxVTJTTy9PT0hiRXc0UTk0RWZUTzNWclgyUXBNL3MzUTFjR3NHc0ViUk0yZW56ClFSUUQwNmw0Kzl2c1dZQzNzcm1sMnFjeHVpR2dsZWhUWitkd25yVTJ0QmlIVjNvaTdDT09YUE9DbkhHV3Z0eTkKU2YxL1NVYXZ0QWxWZ2cySmd5bllNRlIzeGw2alovOC9mMitRUWFwTXhWMjU0WFFzaERPdE5GbGJ4bmdWNTZMbgp3dzBmQ05Mc3ZBNmNiVlRQYTJzQ0xpd3hGQUQ4U05LUk5QWG5zek9EZURqUXkvbXc2UlF2emtLNXBPNThoWVVDCkF3RUFBYU9DQlEwd2dnVUpNQTRHQTFVZER3RUIvd1FFQXdJRm9EQU1CZ05WSFJNQkFmOEVBakFBTUI4R0ExVWQKSXdRWU1CYUFGQnRVTE56ZHlWOVgyUWxDb0FtbjE3Q2lHSXpUTUlJRXhnWURWUjBSQklJRXZUQ0NCTG1DQ1d4dgpZMkZzYUc5emRJSU5jMjl0WlMxdVlXMWxMWEp6TUlJcGMyOXRaUzF1WVcxbExYSnpNQzUwYkhNdGFYTnpkV1V0ClkyVnlkQzF0WVc1aFoyVnlMVEUzTWppQ08zTnZiV1V0Ym1GdFpTMXljekF1ZEd4ekxXbHpjM1ZsTFdObGNuUXQKYldGdVlXZGxjaTB4TnpJNExuTjJZeTVqYkhWemRHVnlMbXh2WTJGc2dnOHFMbk52YldVdGJtRnRaUzF5Y3pDQwpLeW91YzI5dFpTMXVZVzFsTFhKek1DNTBiSE10YVhOemRXVXRZMlZ5ZEMxdFlXNWhaMlZ5TFRFM01qaUNQU291CmMyOXRaUzF1WVcxbExYSnpNQzUwYkhNdGFYTnpkV1V0WTJWeWRDMXRZVzVoWjJWeUxURTNNamd1YzNaakxtTnMKZFhOMFpYSXViRzlqWVd5Q1BuTnZiV1V0Ym1GdFpTMXljekF1ZEd4ekxXbHpjM1ZsTFdObGNuUXRiV0Z1WVdkbApjaTB4TnpJNExuTjJZeTVqYkhWemRHVnljMlYwTG14dlkyRnNna0FxTG5OdmJXVXRibUZ0WlMxeWN6QXVkR3h6CkxXbHpjM1ZsTFdObGNuUXRiV0Z1WVdkbGNpMHhOekk0TG5OMll5NWpiSFZ6ZEdWeWMyVjBMbXh2WTJGc2dqSXEKTG5Sc2N5MXBjM04xWlMxalpYSjBMVzFoYm1GblpYSXRNVGN5T0M1emRtTXVZMngxYzNSbGNuTmxkQzVzYjJOaApiSUlRYzI5dFpTMXVZVzFsTFcxdmJtZHZjNElzYzI5dFpTMXVZVzFsTFcxdmJtZHZjeTUwYkhNdGFYTnpkV1V0ClkyVnlkQzF0WVc1aFoyVnlMVEUzTWppQ1BuTnZiV1V0Ym1GdFpTMXRiMjVuYjNNdWRHeHpMV2x6YzNWbExXTmwKY25RdGJXRnVZV2RsY2kweE56STRMbk4yWXk1amJIVnpkR1Z5TG14dlkyRnNnaElxTG5OdmJXVXRibUZ0WlMxdApiMjVuYjNPQ0xpb3VjMjl0WlMxdVlXMWxMVzF2Ym1kdmN5NTBiSE10YVhOemRXVXRZMlZ5ZEMxdFlXNWhaMlZ5CkxURTNNamlDUUNvdWMyOXRaUzF1WVcxbExXMXZibWR2Y3k1MGJITXRhWE56ZFdVdFkyVnlkQzF0WVc1aFoyVnkKTFRFM01qZ3VjM1pqTG1Oc2RYTjBaWEl1Ykc5allXeUNEWE52YldVdGJtRnRaUzFqWm1lQ0tYTnZiV1V0Ym1GdApaUzFqWm1jdWRHeHpMV2x6YzNWbExXTmxjblF0YldGdVlXZGxjaTB4TnpJNGdqdHpiMjFsTFc1aGJXVXRZMlpuCkxuUnNjeTFwYzNOMVpTMWpaWEowTFcxaGJtRm5aWEl0TVRjeU9DNXpkbU11WTJ4MWMzUmxjaTVzYjJOaGJJSVAKS2k1emIyMWxMVzVoYldVdFkyWm5naXNxTG5OdmJXVXRibUZ0WlMxalptY3VkR3h6TFdsemMzVmxMV05sY25RdApiV0Z1WVdkbGNpMHhOekk0Z2owcUxuTnZiV1V0Ym1GdFpTMWpabWN1ZEd4ekxXbHpjM1ZsTFdObGNuUXRiV0Z1CllXZGxjaTB4TnpJNExuTjJZeTVqYkhWemRHVnlMbXh2WTJGc2drRnpiMjFsTFc1aGJXVXRiVzl1WjI5ekxuUnMKY3kxcGMzTjFaUzFqWlhKMExXMWhibUZuWlhJdE1UY3lPQzV6ZG1NdVkyeDFjM1JsY25ObGRDNXNiMk5oYklKRApLaTV6YjIxbExXNWhiV1V0Ylc5dVoyOXpMblJzY3kxcGMzTjFaUzFqWlhKMExXMWhibUZuWlhJdE1UY3lPQzV6CmRtTXVZMngxYzNSbGNuTmxkQzVzYjJOaGJJSStjMjl0WlMxdVlXMWxMV05tWnk1MGJITXRhWE56ZFdVdFkyVnkKZEMxdFlXNWhaMlZ5TFRFM01qZ3VjM1pqTG1Oc2RYTjBaWEp6WlhRdWJHOWpZV3lDUUNvdWMyOXRaUzF1WVcxbApMV05tWnk1MGJITXRhWE56ZFdVdFkyVnlkQzF0WVc1aFoyVnlMVEUzTWpndWMzWmpMbU5zZFhOMFpYSnpaWFF1CmJHOWpZV3d3RFFZSktvWklodmNOQVFFTEJRQURnZ0VCQUxKRWhXSDd0aFJMbms0NzlJaFRRa24xenhGUXZSNzMKZlpYT3loT3J5Q0Nod3FDL29QdHpLNWM1NW5HMncrU0k3VWZmVVJlMnUvY1dRSTJINFc3Q2prYXhvbllJaFNWTwpZU3dBWEY4bFR5ODZjTDZQeHlXL1Z0Y0tUUW9
NTzNYeDJNN3UyTExzL0wyWFBWRCtua1AwM1EweWF1V3RXektiCmppRFVFdTY3bjI3Q0MrSENUWWNzcWRtbG0xVU82Y3dDYzc2RndOdUtrVGhHaVZzTlNsL2F6Z2hJUGZMN20weG4KbEY2NjVZbG1RZFJ3Yk9INi9ienNGdDNDKzZrNGRINmkwOFFSRnhJK0EzOW5KVXZVb2k2bDhvOW81QVgxaHVaMwpNV1JkeExvS2UrSWo4aElHQVY0Ym9ncUM2RTd4ZlpLdHhGME9WUG43aE15UG9oeU9lUkh3bFhBPQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg=="' ']' + check_secret_data_key some-name-ssl tls.key + local secret_name=some-name-ssl + local data_key=tls.key + local secret_data ++ kubectl_bin get secrets/some-name-ssl -o json ++ jq '.data["tls.key"]' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EjxWMtqviZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.JerSQf09qt ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.EjxWMtqviZ ++ cat /tmp/tmp.JerSQf09qt ++ rm /tmp/tmp.EjxWMtqviZ /tmp/tmp.JerSQf09qt ++ return 0 + secret_data='"LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFcEFJQkFBS0NBUUVBOGhNMjYrT0lQc2hOb280bmJwOUYwVlBONk1LVDZHUUtDd3dxc2ZEZkU2QVpnd0pPCk5HaTFNakhjQSswejdoOGZ2TDRwNFNWUmhLbCtaU0xnejBIZGJiREpweDdUNndvMlhsNU9mdzMzcXBUWkk3ODQKNGRzVERoRDNnUjlNN2RXdGZaQ2t6K3pkRFZ3YXdhd1J0RXpaNmZOQkZBUFRxWGo3Mit4WmdMZXl1YVhhcHpHNgpJYUNWNkZObjUzQ2V0VGEwR0lkWGVpTHNJNDVjODRLY2NaYSszTDFKL1g5SlJxKzBDVldDRFltREtkZ3dWSGZHClhxTm4vejkvYjVCQnFrekZYYm5oZEN5RU02MDBXVnZHZUJYbm91ZkREUjhJMHV5OERweHRWTTlyYXdJdUxERVUKQVB4STBwRTA5ZWV6TTRONE9OREwrYkRwRkMvT1FybWs3bnlGaFFJREFRQUJBb0lCQUU0UkVrSXFZNUxUcCszSAphRUlOalVkL2ZrVVZFdnY3M3gxRzlESGtXeHlLSWhBVTIwR2RqdW04R0pjUGxSS3k2TnBHZlYwRld1K3NkWDJLCkxQUEo2dTNuK0hBenllWGZxajd1ck9QZWprTnRzMFVLOHY0clYwN08rL0toTkg3eXpQdFVmVUlzd2ZFQUJnVDEKUjlSY2dGNHBPNUNIMTJldEE1UWQrWk5BS3B3aGx2Wms0NHo3TmpTQWh0UUtwK2pqcjNKbFdUYm1rOTlOaURocApYOUl6TUorQ1NKeStraTBYU0lwT1B4eWJXaFVOaU5Fb2lmcks3TlplZERZK0ptR2VEaVdrdC9UME10VTcwUkJOCnBrZDViTHdIeGF1UEFLZTNUNmJOV0R3clNMR1JXWkdTdEJuVm5BUk93QXhHN1lmaVhPY1hUTzNqdlMrSE0yejUKcDNWVXg3VUNnWUVBOUpWeW5TUGlrQkltazJEOE8vYlhXYldNTFo0U2s0SHBFWFlUTXFUbnc0MGJrU3lBd2VqTgpDbS9RNVBvZ05XKzN6bDN5QnZxbXZ1YW9SVjE5ZmNyQ0c0N3ZIaS9pOE4xOW5Ma2VGTkZRNWVZZ1RadTByNzBCCk9UeDBKSGdhRVh6TkN3V3lidHdlYXhqUnd1NHpOTXR0YXRBN0tnQkp5dlQ5TlpCcG43MzgzdjhDZ1lFQS9WL0sKTllWU1U4djkzWmMyYmlqRExsNVIrclBFaVVDOWFyZUxWNVpidTk1dHJDNitYb2J4QzIwVVZqVHIyMnBiaEp2WAoyRi8zZXhhNzVaT2pNMC9oZlVYN1U3bkhJQlIzbGdhUFJ3WEw2Nm1veC9sdWo0TFFEelJuSEwzbVZock9RRXRUCmEweStzS2ZTOFVLM3pZZEhBNTIxTy9pOW1jenNrdlA1WElXRG4zc0NnWUVBd2tBWGdUcEk3SWVkMzVzOWVScDQKdXFUM3FodExRYUFCelg1cmpPTnZ0dDNSTVNHWk9XaDl3SFpDQURtdmJ3RkZpRDhRdy80NmhJK0l3VjZaRmxqOApoUXFtV3VGSmFtZGZKaVBZQWt5ajVaTTV3S21UZmFlczJ4cHFXN1NQYjZnZzZVb2pCdTBIczZOSXBYcXRiSmJNClZmTC9xa0lFV1k3dHM2YjRYSWgxMTEwQ2dZQitHZk9BaVpiOHY1VGNlUXFNN3RxZE5ESW16TnR1MS9xdHB5WEIKZE1LNUVLMlN1VHpqdk5Nd3Rad2VmMzQzR0lydHg4WTRHU01KYnJvYk8weEpHR09JUHBFa0RWenVPV0YvR0Vxcwp0VlRWTi80WmNLbTI0UXI5MWozUFR5L1pBTVJQdVcyT0JJY1BVcWNvamVUK2RHcENnWFlXbitoMUtFbEJwZzBnCklBN3ZDd0tCZ1FDRTZzMEJVeFhZSVVMaUhlNkpYMkpxUTVKVDFnRXVSTnlkT3UwVU01NFFmcEtDVndxSTRYanAKa1h2Y1dXekRsTGIxeDgzaWRPZ0VOVk9majJwSzhSN0tXUUVLUlJpZU10YzEwcnJDek5oRnZmb0ZzRktLSGpVNwpHaitrckh2ZFJNdGRVd2poTFlFR0lhZUJWL0ZwcmdZblh0UHhvN2lXOEs0VkZ5cHBoTlY1aWc9PQotLS0tLUVORCBSU0EgUFJJVkFURSBLRVktLS0tLQo="' + '[' -z 
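
Note: almost every command in this log runs through kubectl_bin, which captures stdout and stderr into mktemp files and retries up to three times (the `seq 0 2` loops with LAST_OUT/LAST_ERR). A stripped-down sketch of that wrapper is below; the real wrapper's retry timing and error handling differ.

# Sketch: retry kubectl a few times, keeping the last stdout/stderr for the log.
kubectl_bin() {
    local out err status=1 i
    out=$(mktemp); err=$(mktemp)
    for i in 0 1 2; do
        if kubectl "$@" >"$out" 2>"$err"; then
            status=0
            break
        else
            status=$?
            sleep $((i * 2))    # simple back-off between attempts (illustrative)
        fi
    done
    cat "$out"
    cat "$err" >&2
    rm -f "$out" "$err"
    return "$status"
}
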
'"LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFcEFJQkFBS0NBUUVBOGhNMjYrT0lQc2hOb280bmJwOUYwVlBONk1LVDZHUUtDd3dxc2ZEZkU2QVpnd0pPCk5HaTFNakhjQSswejdoOGZ2TDRwNFNWUmhLbCtaU0xnejBIZGJiREpweDdUNndvMlhsNU9mdzMzcXBUWkk3ODQKNGRzVERoRDNnUjlNN2RXdGZaQ2t6K3pkRFZ3YXdhd1J0RXpaNmZOQkZBUFRxWGo3Mit4WmdMZXl1YVhhcHpHNgpJYUNWNkZObjUzQ2V0VGEwR0lkWGVpTHNJNDVjODRLY2NaYSszTDFKL1g5SlJxKzBDVldDRFltREtkZ3dWSGZHClhxTm4vejkvYjVCQnFrekZYYm5oZEN5RU02MDBXVnZHZUJYbm91ZkREUjhJMHV5OERweHRWTTlyYXdJdUxERVUKQVB4STBwRTA5ZWV6TTRONE9OREwrYkRwRkMvT1FybWs3bnlGaFFJREFRQUJBb0lCQUU0UkVrSXFZNUxUcCszSAphRUlOalVkL2ZrVVZFdnY3M3gxRzlESGtXeHlLSWhBVTIwR2RqdW04R0pjUGxSS3k2TnBHZlYwRld1K3NkWDJLCkxQUEo2dTNuK0hBenllWGZxajd1ck9QZWprTnRzMFVLOHY0clYwN08rL0toTkg3eXpQdFVmVUlzd2ZFQUJnVDEKUjlSY2dGNHBPNUNIMTJldEE1UWQrWk5BS3B3aGx2Wms0NHo3TmpTQWh0UUtwK2pqcjNKbFdUYm1rOTlOaURocApYOUl6TUorQ1NKeStraTBYU0lwT1B4eWJXaFVOaU5Fb2lmcks3TlplZERZK0ptR2VEaVdrdC9UME10VTcwUkJOCnBrZDViTHdIeGF1UEFLZTNUNmJOV0R3clNMR1JXWkdTdEJuVm5BUk93QXhHN1lmaVhPY1hUTzNqdlMrSE0yejUKcDNWVXg3VUNnWUVBOUpWeW5TUGlrQkltazJEOE8vYlhXYldNTFo0U2s0SHBFWFlUTXFUbnc0MGJrU3lBd2VqTgpDbS9RNVBvZ05XKzN6bDN5QnZxbXZ1YW9SVjE5ZmNyQ0c0N3ZIaS9pOE4xOW5Ma2VGTkZRNWVZZ1RadTByNzBCCk9UeDBKSGdhRVh6TkN3V3lidHdlYXhqUnd1NHpOTXR0YXRBN0tnQkp5dlQ5TlpCcG43MzgzdjhDZ1lFQS9WL0sKTllWU1U4djkzWmMyYmlqRExsNVIrclBFaVVDOWFyZUxWNVpidTk1dHJDNitYb2J4QzIwVVZqVHIyMnBiaEp2WAoyRi8zZXhhNzVaT2pNMC9oZlVYN1U3bkhJQlIzbGdhUFJ3WEw2Nm1veC9sdWo0TFFEelJuSEwzbVZock9RRXRUCmEweStzS2ZTOFVLM3pZZEhBNTIxTy9pOW1jenNrdlA1WElXRG4zc0NnWUVBd2tBWGdUcEk3SWVkMzVzOWVScDQKdXFUM3FodExRYUFCelg1cmpPTnZ0dDNSTVNHWk9XaDl3SFpDQURtdmJ3RkZpRDhRdy80NmhJK0l3VjZaRmxqOApoUXFtV3VGSmFtZGZKaVBZQWt5ajVaTTV3S21UZmFlczJ4cHFXN1NQYjZnZzZVb2pCdTBIczZOSXBYcXRiSmJNClZmTC9xa0lFV1k3dHM2YjRYSWgxMTEwQ2dZQitHZk9BaVpiOHY1VGNlUXFNN3RxZE5ESW16TnR1MS9xdHB5WEIKZE1LNUVLMlN1VHpqdk5Nd3Rad2VmMzQzR0lydHg4WTRHU01KYnJvYk8weEpHR09JUHBFa0RWenVPV0YvR0Vxcwp0VlRWTi80WmNLbTI0UXI5MWozUFR5L1pBTVJQdVcyT0JJY1BVcWNvamVUK2RHcENnWFlXbitoMUtFbEJwZzBnCklBN3ZDd0tCZ1FDRTZzMEJVeFhZSVVMaUhlNkpYMkpxUTVKVDFnRXVSTnlkT3UwVU01NFFmcEtDVndxSTRYanAKa1h2Y1dXekRsTGIxeDgzaWRPZ0VOVk9majJwSzhSN0tXUUVLUlJpZU10YzEwcnJDek5oRnZmb0ZzRktLSGpVNwpHaitrckh2ZFJNdGRVd2poTFlFR0lhZUJWL0ZwcmdZblh0UHhvN2lXOEs0VkZ5cHBoTlY1aWc9PQotLS0tLUVORCBSU0EgUFJJVkFURSBLRVktLS0tLQo="' ']' + desc 'check if CA issuer created' + set +o xtrace ----------------------------------------------------------------------------------- check if CA issuer created ----------------------------------------------------------------------------------- + compare_kubectl issuer/some-name-psmdb-ca-issuer + local resource=issuer/some-name-psmdb-ca-issuer + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-ca-issuer.yml + local new_result=/tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-ca-issuer.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-ca-issuer-oc.yml ']' + kubectl_bin get -o yaml issuer/some-name-psmdb-ca-issuer + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. 
| select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-1728", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.nIZdlkdw2k ++ mktemp + local LAST_ERR=/tmp/tmp.RzPG6fDDKr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml issuer/some-name-psmdb-ca-issuer + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.nIZdlkdw2k + cat /tmp/tmp.RzPG6fDDKr + rm /tmp/tmp.nIZdlkdw2k /tmp/tmp.RzPG6fDDKr + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-ca-issuer.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-ca-issuer.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-ca-issuer.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-ca-issuer.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-ca-issuer.yml /tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-ca-issuer.yml + desc 'check if issuer created' + set +o xtrace ----------------------------------------------------------------------------------- check if issuer created ----------------------------------------------------------------------------------- + compare_kubectl issuer/some-name-psmdb-issuer + local resource=issuer/some-name-psmdb-issuer + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-issuer.yml + local new_result=/tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-issuer.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-issuer-oc.yml ']' + kubectl_bin get -o yaml issuer/some-name-psmdb-issuer + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. 
| select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-1728", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.2rgoB61mFR ++ mktemp + local LAST_ERR=/tmp/tmp.wx2qJgUzvR + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml issuer/some-name-psmdb-issuer + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.2rgoB61mFR + cat /tmp/tmp.wx2qJgUzvR + rm /tmp/tmp.2rgoB61mFR /tmp/tmp.wx2qJgUzvR + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-issuer.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-issuer.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-issuer.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-issuer.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-issuer.yml /tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-issuer.yml + desc 'check if certificate issued' + set +o xtrace ----------------------------------------------------------------------------------- check if certificate issued ----------------------------------------------------------------------------------- + compare_kubectl certificate/some-name-ssl + local resource=certificate/some-name-ssl + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl.yml + local new_result=/tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-oc.yml ']' + kubectl_bin get -o yaml certificate/some-name-ssl + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. 
| select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-1728", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.8yStfjLC8q ++ mktemp + local LAST_ERR=/tmp/tmp.2z5C1RsjZ4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml certificate/some-name-ssl + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.8yStfjLC8q + cat /tmp/tmp.2z5C1RsjZ4 + rm /tmp/tmp.8yStfjLC8q /tmp/tmp.2z5C1RsjZ4 + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl.yml /tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl.yml + desc 'check if internal certificate issued' + set +o xtrace ----------------------------------------------------------------------------------- check if internal certificate issued ----------------------------------------------------------------------------------- + compare_kubectl certificate/some-name-ssl-internal + local resource=certificate/some-name-ssl-internal + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-internal.yml + local new_result=/tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl-internal.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-internal-oc.yml ']' + kubectl_bin get -o yaml certificate/some-name-ssl-internal + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. 
| select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-1728", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.t11ndRsPdn ++ mktemp + local LAST_ERR=/tmp/tmp.Mbt6CUfYXx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml certificate/some-name-ssl-internal + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.t11ndRsPdn + cat /tmp/tmp.Mbt6CUfYXx + rm /tmp/tmp.t11ndRsPdn /tmp/tmp.Mbt6CUfYXx + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl-internal.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl-internal.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl-internal.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-internal.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-internal.yml /tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl-internal.yml + renew_certificate some-name-ssl + certificate=some-name-ssl + wait_certificate some-name-ssl + certificate=some-name-ssl + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl --timeout=60s certificate.cert-manager.io/some-name-ssl condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl --timeout=60s certificate.cert-manager.io/some-name-ssl condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl --timeout=60s certificate.cert-manager.io/some-name-ssl condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl --timeout=60s certificate.cert-manager.io/some-name-ssl condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl --timeout=60s certificate.cert-manager.io/some-name-ssl condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl --timeout=60s certificate.cert-manager.io/some-name-ssl condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl --timeout=60s certificate.cert-manager.io/some-name-ssl condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl --timeout=60s certificate.cert-manager.io/some-name-ssl condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl --timeout=60s certificate.cert-manager.io/some-name-ssl condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl --timeout=60s certificate.cert-manager.io/some-name-ssl condition met + sleep 1 + desc 'renew some-name-ssl' + set +o xtrace ----------------------------------------------------------------------------------- renew some-name-ssl ----------------------------------------------------------------------------------- + local pod_name ++ kubectl_bin get pods --selector=name=cmctl -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aYVgYZrlvG +++ mktemp ++ local LAST_ERR=/tmp/tmp.2AK4TBc8wZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=cmctl -o 
'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.aYVgYZrlvG ++ cat /tmp/tmp.2AK4TBc8wZ ++ rm /tmp/tmp.aYVgYZrlvG /tmp/tmp.2AK4TBc8wZ ++ return 0 + pod_name=cmctl-69659bcd68-pq8ck + local revision ++ kubectl_bin get certificate some-name-ssl -o 'jsonpath={.status.revision}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7DgX7s2HeS +++ mktemp ++ local LAST_ERR=/tmp/tmp.9TjYR1qOGq ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get certificate some-name-ssl -o 'jsonpath={.status.revision}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.7DgX7s2HeS ++ cat /tmp/tmp.9TjYR1qOGq ++ rm /tmp/tmp.7DgX7s2HeS /tmp/tmp.9TjYR1qOGq ++ return 0 + revision=1 + kubectl_bin exec cmctl-69659bcd68-pq8ck -- /tmp/cmctl renew some-name-ssl ++ mktemp + local LAST_OUT=/tmp/tmp.TeghZ5thHU ++ mktemp + local LAST_ERR=/tmp/tmp.zed2961bTr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec cmctl-69659bcd68-pq8ck -- /tmp/cmctl renew some-name-ssl + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.TeghZ5thHU Manually triggered issuance of Certificate tls-issue-cert-manager-1728/some-name-ssl + cat /tmp/tmp.zed2961bTr + rm /tmp/tmp.TeghZ5thHU /tmp/tmp.zed2961bTr + return 0 + for i in '{1..10}' + local new_revision ++ kubectl_bin get certificate some-name-ssl -o 'jsonpath={.status.revision}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3n1vbROfHC +++ mktemp ++ local LAST_ERR=/tmp/tmp.ytfDZyjPbx ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get certificate some-name-ssl -o 'jsonpath={.status.revision}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3n1vbROfHC ++ cat /tmp/tmp.ytfDZyjPbx ++ rm /tmp/tmp.3n1vbROfHC /tmp/tmp.ytfDZyjPbx ++ return 0 + new_revision=2 + '[' 2 == 2 ']' + break + sleep 10 + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.piqXyeFwqe +++ mktemp ++ local LAST_ERR=/tmp/tmp.eoQxDCdvUp ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.piqXyeFwqe ++ cat /tmp/tmp.eoQxDCdvUp ++ rm /tmp/tmp.piqXyeFwqe /tmp/tmp.eoQxDCdvUp ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.x9Rm8fZEg9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.NScYU3vA7K ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 
0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.x9Rm8fZEg9 ++ cat /tmp/tmp.NScYU3vA7K ++ rm /tmp/tmp.x9Rm8fZEg9 /tmp/tmp.NScYU3vA7K ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness......................................................................................................................................................................... + wait_for_running some-name-cfg 3 false + local name=some-name-cfg + let last_pod=2 + local check_cluster_readyness=false + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=cfg + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-cfg-0 + local pod=some-name-cfg-0 + set +o xtrace waiting for pod/some-name-cfg-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-cfg-1 + local pod=some-name-cfg-1 + set +o xtrace waiting for pod/some-name-cfg-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.R4f1GeAGJx +++ mktemp ++ local LAST_ERR=/tmp/tmp.KLWS2x8onA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.R4f1GeAGJx ++ cat /tmp/tmp.KLWS2x8onA ++ rm /tmp/tmp.R4f1GeAGJx /tmp/tmp.KLWS2x8onA ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-cfg-2 + local pod=some-name-cfg-2 + set +o xtrace waiting for pod/some-name-cfg-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oaMocoaSH0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.V08tFFdZYr ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.oaMocoaSH0 ++ cat /tmp/tmp.V08tFFdZYr ++ rm /tmp/tmp.oaMocoaSH0 /tmp/tmp.V08tFFdZYr ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ false == \t\r\u\e ]] + wait_for_running some-name-mongos 3 + local name=some-name-mongos + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=mongos + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-mongos-0 + local pod=some-name-mongos-0 + set +o xtrace waiting for pod/some-name-mongos-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-mongos-1 + local pod=some-name-mongos-1 + set +o xtrace waiting for pod/some-name-mongos-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ydiKzzEh2P +++ mktemp ++ local LAST_ERR=/tmp/tmp.NUM8qkrRzI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a 
-n 1 ']' ++ break ++ cat /tmp/tmp.ydiKzzEh2P ++ cat /tmp/tmp.NUM8qkrRzI ++ rm /tmp/tmp.ydiKzzEh2P /tmp/tmp.NUM8qkrRzI ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-mongos-2 + local pod=some-name-mongos-2 + set +o xtrace waiting for pod/some-name-mongos-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PPTqC6u7Vf +++ mktemp ++ local LAST_ERR=/tmp/tmp.zYbgLANEi7 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.PPTqC6u7Vf ++ cat /tmp/tmp.zYbgLANEi7 ++ rm /tmp/tmp.PPTqC6u7Vf /tmp/tmp.zYbgLANEi7 ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + renew_certificate some-name-ssl-internal + certificate=some-name-ssl-internal + wait_certificate some-name-ssl-internal + certificate=some-name-ssl-internal + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl-internal --timeout=60s certificate.cert-manager.io/some-name-ssl-internal condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl-internal --timeout=60s certificate.cert-manager.io/some-name-ssl-internal condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl-internal --timeout=60s certificate.cert-manager.io/some-name-ssl-internal condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl-internal --timeout=60s certificate.cert-manager.io/some-name-ssl-internal condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl-internal --timeout=60s certificate.cert-manager.io/some-name-ssl-internal condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl-internal --timeout=60s certificate.cert-manager.io/some-name-ssl-internal condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl-internal --timeout=60s certificate.cert-manager.io/some-name-ssl-internal condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl-internal --timeout=60s certificate.cert-manager.io/some-name-ssl-internal condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl-internal --timeout=60s certificate.cert-manager.io/some-name-ssl-internal condition met + sleep 1 + for i in '{1..10}' + kubectl wait --for=condition=Ready certificate/some-name-ssl-internal --timeout=60s certificate.cert-manager.io/some-name-ssl-internal condition met + sleep 1 + desc 'renew some-name-ssl-internal' + set +o xtrace ----------------------------------------------------------------------------------- renew some-name-ssl-internal ----------------------------------------------------------------------------------- + local pod_name ++ kubectl_bin get pods --selector=name=cmctl -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0H2JzJAlZu +++ mktemp ++ local LAST_ERR=/tmp/tmp.U3IIH3FmQ5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=cmctl -o 'jsonpath={.items[].metadata.name}' ++ 
exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.0H2JzJAlZu ++ cat /tmp/tmp.U3IIH3FmQ5 ++ rm /tmp/tmp.0H2JzJAlZu /tmp/tmp.U3IIH3FmQ5 ++ return 0 + pod_name=cmctl-69659bcd68-pq8ck + local revision ++ kubectl_bin get certificate some-name-ssl-internal -o 'jsonpath={.status.revision}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1ZsHVqEUr4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.xHhV6shKK0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get certificate some-name-ssl-internal -o 'jsonpath={.status.revision}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.1ZsHVqEUr4 ++ cat /tmp/tmp.xHhV6shKK0 ++ rm /tmp/tmp.1ZsHVqEUr4 /tmp/tmp.xHhV6shKK0 ++ return 0 + revision=1 + kubectl_bin exec cmctl-69659bcd68-pq8ck -- /tmp/cmctl renew some-name-ssl-internal ++ mktemp + local LAST_OUT=/tmp/tmp.eb7xpCpk0y ++ mktemp + local LAST_ERR=/tmp/tmp.DhX0VPrYCB + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec cmctl-69659bcd68-pq8ck -- /tmp/cmctl renew some-name-ssl-internal + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.eb7xpCpk0y Manually triggered issuance of Certificate tls-issue-cert-manager-1728/some-name-ssl-internal + cat /tmp/tmp.DhX0VPrYCB + rm /tmp/tmp.eb7xpCpk0y /tmp/tmp.DhX0VPrYCB + return 0 + for i in '{1..10}' + local new_revision ++ kubectl_bin get certificate some-name-ssl-internal -o 'jsonpath={.status.revision}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2V9wLYyqWn +++ mktemp ++ local LAST_ERR=/tmp/tmp.cYM7JFEUfX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get certificate some-name-ssl-internal -o 'jsonpath={.status.revision}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.2V9wLYyqWn ++ cat /tmp/tmp.cYM7JFEUfX ++ rm /tmp/tmp.2V9wLYyqWn /tmp/tmp.cYM7JFEUfX ++ return 0 + new_revision=2 + '[' 2 == 2 ']' + break + sleep 10 + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZpjwUjwpGz +++ mktemp ++ local LAST_ERR=/tmp/tmp.JqSL9QFloB ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ZpjwUjwpGz ++ cat /tmp/tmp.JqSL9QFloB ++ rm /tmp/tmp.ZpjwUjwpGz /tmp/tmp.JqSL9QFloB ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DO8wfW1vRg +++ mktemp ++ local LAST_ERR=/tmp/tmp.LvaHe6ihrh ++ local exit_status=0 ++ local timeout=4 +++ 
seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.DO8wfW1vRg ++ cat /tmp/tmp.LvaHe6ihrh ++ rm /tmp/tmp.DO8wfW1vRg /tmp/tmp.LvaHe6ihrh ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness.................................................................................................................................................................. + wait_for_running some-name-cfg 3 false + local name=some-name-cfg + let last_pod=2 + local check_cluster_readyness=false + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=cfg + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-cfg-0 + local pod=some-name-cfg-0 + set +o xtrace waiting for pod/some-name-cfg-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-cfg-1 + local pod=some-name-cfg-1 + set +o xtrace waiting for pod/some-name-cfg-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1qTLfXDqS9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.srotB4lVmA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.1qTLfXDqS9 ++ cat /tmp/tmp.srotB4lVmA ++ rm /tmp/tmp.1qTLfXDqS9 /tmp/tmp.srotB4lVmA ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-cfg-2 + local pod=some-name-cfg-2 + set +o xtrace waiting for pod/some-name-cfg-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.12Ovk8vqW3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.4HqiLQKnbp ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.12Ovk8vqW3 ++ cat /tmp/tmp.4HqiLQKnbp ++ rm /tmp/tmp.12Ovk8vqW3 /tmp/tmp.4HqiLQKnbp ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ false == \t\r\u\e ]] + wait_for_running some-name-mongos 3 + local name=some-name-mongos + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=mongos + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-mongos-0 + local pod=some-name-mongos-0 + set +o xtrace waiting for pod/some-name-mongos-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-mongos-1 + local pod=some-name-mongos-1 + set +o xtrace waiting for pod/some-name-mongos-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pPYd1eAW1A +++ mktemp ++ local LAST_ERR=/tmp/tmp.SjvexvTDl9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].arbiter.enabled}' ++ exit_status=0 ++ set -e 
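
Editorial sketch: the renew_certificate/wait_certificate steps traced above (for some-name-ssl and some-name-ssl-internal) boil down to one pattern: wait for the Certificate to be Ready, record its .status.revision, trigger a manual issuance with cmctl from the helper pod, then poll until the revision increments. The condensed helper below is not the test suite's own code; the function name renew_and_verify is invented here, while the name=cmctl pod selector, the /tmp/cmctl binary path, the 60s wait timeout and the 10 polling attempts are taken from this log.

    # Hypothetical condensed form of the renewal check traced in this log.
    # Assumes the current kubectl context/namespace already points at the test cluster.
    renew_and_verify() {
        local certificate="$1"

        # Certificate must be Ready before a manual renewal makes sense.
        kubectl wait --for=condition=Ready "certificate/${certificate}" --timeout=60s || return 1

        local pod_name revision new_revision
        pod_name=$(kubectl get pods --selector=name=cmctl -o 'jsonpath={.items[].metadata.name}')
        revision=$(kubectl get certificate "${certificate}" -o 'jsonpath={.status.revision}')

        # cmctl is shipped inside the helper pod at /tmp/cmctl in this log.
        kubectl exec "${pod_name}" -- /tmp/cmctl renew "${certificate}"

        # Poll until cert-manager bumps .status.revision, i.e. a new certificate was issued.
        for _ in {1..10}; do
            new_revision=$(kubectl get certificate "${certificate}" -o 'jsonpath={.status.revision}')
            if [ "${new_revision}" -eq "$((revision + 1))" ]; then
                return 0
            fi
            sleep 1
        done
        return 1
    }

In the trace above this is exactly what happens: revision=1 is read, "Manually triggered issuance of Certificate tls-issue-cert-manager-1728/..." is printed by cmctl, and the loop exits as soon as new_revision=2 is observed, after which the test waits for the rs0, cfg and mongos pods to roll.
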
++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.pPYd1eAW1A ++ cat /tmp/tmp.SjvexvTDl9 ++ rm /tmp/tmp.pPYd1eAW1A /tmp/tmp.SjvexvTDl9 ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-mongos-2 + local pod=some-name-mongos-2 + set +o xtrace waiting for pod/some-name-mongos-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rlTjhejBRi +++ mktemp ++ local LAST_ERR=/tmp/tmp.oWtBNeyR0v ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.rlTjhejBRi ++ cat /tmp/tmp.oWtBNeyR0v ++ rm /tmp/tmp.rlTjhejBRi /tmp/tmp.oWtBNeyR0v ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + desc 'check if CA issuer created' + set +o xtrace ----------------------------------------------------------------------------------- check if CA issuer created ----------------------------------------------------------------------------------- + compare_kubectl issuer/some-name-psmdb-ca-issuer + local resource=issuer/some-name-psmdb-ca-issuer + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-ca-issuer.yml + local new_result=/tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-ca-issuer.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-ca-issuer-oc.yml ']' + kubectl_bin get -o yaml issuer/some-name-psmdb-ca-issuer + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. 
| select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-1728", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.wQGeixBmUF ++ mktemp + local LAST_ERR=/tmp/tmp.5JHmMxIoEB + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml issuer/some-name-psmdb-ca-issuer + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.wQGeixBmUF + cat /tmp/tmp.5JHmMxIoEB + rm /tmp/tmp.wQGeixBmUF /tmp/tmp.5JHmMxIoEB + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-ca-issuer.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-ca-issuer.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-ca-issuer.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-ca-issuer.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-ca-issuer.yml /tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-ca-issuer.yml + desc 'check if issuer created' + set +o xtrace ----------------------------------------------------------------------------------- check if issuer created ----------------------------------------------------------------------------------- + compare_kubectl issuer/some-name-psmdb-issuer + local resource=issuer/some-name-psmdb-issuer + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-issuer.yml + local new_result=/tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-issuer.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-issuer-oc.yml ']' + kubectl_bin get -o yaml issuer/some-name-psmdb-issuer + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. 
| select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-1728", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.EtqYqW9ZbL ++ mktemp + local LAST_ERR=/tmp/tmp.0ijpB3j1BB + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml issuer/some-name-psmdb-issuer + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.EtqYqW9ZbL + cat /tmp/tmp.0ijpB3j1BB + rm /tmp/tmp.EtqYqW9ZbL /tmp/tmp.0ijpB3j1BB + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-issuer.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-issuer.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-issuer.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-issuer.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-psmdb-issuer.yml /tmp/tmp.pCG7nqFZEo/issuer_some-name-psmdb-issuer.yml + desc 'check if certificate issued' + set +o xtrace ----------------------------------------------------------------------------------- check if certificate issued ----------------------------------------------------------------------------------- + compare_kubectl certificate/some-name-ssl + local resource=certificate/some-name-ssl + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl.yml + local new_result=/tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-oc.yml ']' + kubectl_bin get -o yaml certificate/some-name-ssl + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. 
| select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-1728", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.4OaA1gc8hd ++ mktemp + local LAST_ERR=/tmp/tmp.9FfDvM9JVN + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml certificate/some-name-ssl + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.4OaA1gc8hd + cat /tmp/tmp.9FfDvM9JVN + rm /tmp/tmp.4OaA1gc8hd /tmp/tmp.9FfDvM9JVN + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl.yml /tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl.yml + desc 'check if internal certificate issued' + set +o xtrace ----------------------------------------------------------------------------------- check if internal certificate issued ----------------------------------------------------------------------------------- + compare_kubectl certificate/some-name-ssl-internal + local resource=certificate/some-name-ssl-internal + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-internal.yml + local new_result=/tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl-internal.yml + '[' -n '' -a -f 
/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-internal-oc.yml ']' + kubectl_bin get -o yaml certificate/some-name-ssl-internal + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-1728", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.Rjy51QtFnB ++ mktemp + local LAST_ERR=/tmp/tmp.6nceiuj5S6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml certificate/some-name-ssl-internal + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Rjy51QtFnB + cat /tmp/tmp.6nceiuj5S6 + rm /tmp/tmp.Rjy51QtFnB /tmp/tmp.6nceiuj5S6 + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl-internal.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl-internal.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl-internal.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-internal.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-ssl-internal.yml /tmp/tmp.pCG7nqFZEo/certificate_some-name-ssl-internal.yml + desc 'disable TLS' + set +o xtrace ----------------------------------------------------------------------------------- disable TLS ----------------------------------------------------------------------------------- + pause_cluster some-name + local cluster_name=some-name + echo 'Pausing cluster some-name' Pausing cluster some-name + kubectl_bin patch psmdb some-name --type merge '-p={"spec": { "pause": true } }' ++ mktemp + local LAST_OUT=/tmp/tmp.svfQB7jhI8 ++ mktemp + local LAST_ERR=/tmp/tmp.nSZc8ia1TI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch psmdb some-name --type merge '-p={"spec": { "pause": true } }' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.svfQB7jhI8 perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.nSZc8ia1TI + rm /tmp/tmp.svfQB7jhI8 /tmp/tmp.nSZc8ia1TI + return 0 + wait_for_cluster_state some-name paused + local cluster_name=some-name + local target_state=paused + echo -n 'Waiting for cluster to reach paused state' Waiting for cluster to reach paused state+ local timeout=0 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TGSLGOtw1U +++ mktemp ++ local LAST_ERR=/tmp/tmp.b69PoRP2Ah ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.TGSLGOtw1U ++ cat /tmp/tmp.b69PoRP2Ah ++ rm /tmp/tmp.TGSLGOtw1U /tmp/tmp.b69PoRP2Ah ++ return 0 + [[ ready == paused ]] + sleep 1 + timeout=1 + echo -n . .+ [[ 1 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.85mLLeRJMY +++ mktemp ++ local LAST_ERR=/tmp/tmp.XVf2iJJAeL ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.85mLLeRJMY ++ cat /tmp/tmp.XVf2iJJAeL ++ rm /tmp/tmp.85mLLeRJMY /tmp/tmp.XVf2iJJAeL ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=2 + echo -n . 
.+ [[ 2 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.biOfJ6tpcY +++ mktemp ++ local LAST_ERR=/tmp/tmp.iOZWYVHlkc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.biOfJ6tpcY ++ cat /tmp/tmp.iOZWYVHlkc ++ rm /tmp/tmp.biOfJ6tpcY /tmp/tmp.iOZWYVHlkc ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=3 + echo -n . .+ [[ 3 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xOrY7PhjkF +++ mktemp ++ local LAST_ERR=/tmp/tmp.x9EFz1DQfw ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xOrY7PhjkF ++ cat /tmp/tmp.x9EFz1DQfw ++ rm /tmp/tmp.xOrY7PhjkF /tmp/tmp.x9EFz1DQfw ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=4 + echo -n . .+ [[ 4 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TnxAOGqKFJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.aDrHkOkBgM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.TnxAOGqKFJ ++ cat /tmp/tmp.aDrHkOkBgM ++ rm /tmp/tmp.TnxAOGqKFJ /tmp/tmp.aDrHkOkBgM ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=5 + echo -n . .+ [[ 5 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NAD9J4tphP +++ mktemp ++ local LAST_ERR=/tmp/tmp.U3VXFKH7ox ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.NAD9J4tphP ++ cat /tmp/tmp.U3VXFKH7ox ++ rm /tmp/tmp.NAD9J4tphP /tmp/tmp.U3VXFKH7ox ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=6 + echo -n . .+ [[ 6 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qJqU5S57nU +++ mktemp ++ local LAST_ERR=/tmp/tmp.PxeUJ4P0qW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.qJqU5S57nU ++ cat /tmp/tmp.PxeUJ4P0qW ++ rm /tmp/tmp.qJqU5S57nU /tmp/tmp.PxeUJ4P0qW ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=7 + echo -n . .+ [[ 7 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HFyJ7tmEMz +++ mktemp ++ local LAST_ERR=/tmp/tmp.PLULgjwafZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.HFyJ7tmEMz ++ cat /tmp/tmp.PLULgjwafZ ++ rm /tmp/tmp.HFyJ7tmEMz /tmp/tmp.PLULgjwafZ ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=8 + echo -n . 
.+ [[ 8 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Kh4SaeLdiE +++ mktemp ++ local LAST_ERR=/tmp/tmp.YmSpUp2fjq ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Kh4SaeLdiE ++ cat /tmp/tmp.YmSpUp2fjq ++ rm /tmp/tmp.Kh4SaeLdiE /tmp/tmp.YmSpUp2fjq ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=9 + echo -n . .+ [[ 9 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DbYsr71RWT +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ub3h6CRjPW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.DbYsr71RWT ++ cat /tmp/tmp.Ub3h6CRjPW ++ rm /tmp/tmp.DbYsr71RWT /tmp/tmp.Ub3h6CRjPW ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=10 + echo -n . .+ [[ 10 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UZljf0mr3b +++ mktemp ++ local LAST_ERR=/tmp/tmp.xmkkrApLpd ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.UZljf0mr3b ++ cat /tmp/tmp.xmkkrApLpd ++ rm /tmp/tmp.UZljf0mr3b /tmp/tmp.xmkkrApLpd ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=11 + echo -n . .+ [[ 11 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.E0nXUqfNQl +++ mktemp ++ local LAST_ERR=/tmp/tmp.K5k02Y1WcK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.E0nXUqfNQl ++ cat /tmp/tmp.K5k02Y1WcK ++ rm /tmp/tmp.E0nXUqfNQl /tmp/tmp.K5k02Y1WcK ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=12 + echo -n . .+ [[ 12 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tDyk3GFK1G +++ mktemp ++ local LAST_ERR=/tmp/tmp.jw4OdjOglF ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.tDyk3GFK1G ++ cat /tmp/tmp.jw4OdjOglF ++ rm /tmp/tmp.tDyk3GFK1G /tmp/tmp.jw4OdjOglF ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=13 + echo -n . .+ [[ 13 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IOzWWWchju +++ mktemp ++ local LAST_ERR=/tmp/tmp.fyk6EXrG4W ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.IOzWWWchju ++ cat /tmp/tmp.fyk6EXrG4W ++ rm /tmp/tmp.IOzWWWchju /tmp/tmp.fyk6EXrG4W ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=14 + echo -n . 
..........  (wait_for_cluster_state some-name paused keeps polling: every probe of "kubectl get psmdb some-name -o 'jsonpath={.status.state}'" via the kubectl_bin retry wrapper still returns "stopping", so the helper sleeps 1 s, prints a dot and advances the counter from timeout=14 through timeout=74, well under the 1500-second limit)
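The iterations summarized above, and the final checks that follow, all come from the wait_for_cluster_state helper: poll .status.state once per second, print a dot, and give up after 1500 seconds. A minimal sketch of that pattern, reconstructed from the trace (the real helper calls kubectl through the kubectl_bin wrapper and ships with the e2e suite, so the body below is an approximation):

wait_for_cluster_state() {
    local cluster_name=$1
    local target_state=$2
    local timeout=0

    echo -n "Waiting for cluster to reach ${target_state} state"
    # Poll the custom resource until .status.state matches the target.
    while [[ "$(kubectl get psmdb "${cluster_name}" -o 'jsonpath={.status.state}')" != "${target_state}" ]]; do
        sleep 1
        timeout=$((timeout + 1))
        echo -n .
        if [[ ${timeout} -gt 1500 ]]; then
            echo "cluster ${cluster_name} did not reach ${target_state} in time"
            return 1
        fi
    done
    echo
}

# Usage matching the calls visible in this log:
#   wait_for_cluster_state some-name paused
#   wait_for_cluster_state some-name ready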
.+ [[ 74 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.302ZfE3lz9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.iAKaVYZBxy ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.302ZfE3lz9 ++ cat /tmp/tmp.iAKaVYZBxy ++ rm /tmp/tmp.302ZfE3lz9 /tmp/tmp.iAKaVYZBxy ++ return 0 + [[ stopping == paused ]] + sleep 1 + timeout=75 + echo -n . .+ [[ 75 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9JXNCeqEVh +++ mktemp ++ local LAST_ERR=/tmp/tmp.TvJDhn94K1 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.9JXNCeqEVh ++ cat /tmp/tmp.TvJDhn94K1 ++ rm /tmp/tmp.9JXNCeqEVh /tmp/tmp.TvJDhn94K1 ++ return 0 + [[ paused == paused ]] + echo + disable_tls some-name + local cluster_name=some-name + echo 'Disabling TLS for cluster some-name' Disabling TLS for cluster some-name + kubectl_bin patch psmdb some-name --type merge '-p={"spec": { "unsafeFlags": { "tls": true }, "tls": { "mode": "disabled" } } }' ++ mktemp + local LAST_OUT=/tmp/tmp.RYaoHD5CDm ++ mktemp + local LAST_ERR=/tmp/tmp.LVZG7QwLE4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch psmdb some-name --type merge '-p={"spec": { "unsafeFlags": { "tls": true }, "tls": { "mode": "disabled" } } }' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.RYaoHD5CDm perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.LVZG7QwLE4 + rm /tmp/tmp.RYaoHD5CDm /tmp/tmp.LVZG7QwLE4 + return 0 + unpause_cluster some-name + local cluster_name=some-name + echo 'Unpausing cluster some-name' Unpausing cluster some-name + kubectl_bin patch psmdb some-name --type merge '-p={"spec": { "pause": false } }' ++ mktemp + local LAST_OUT=/tmp/tmp.9yoBNrYJSU ++ mktemp + local LAST_ERR=/tmp/tmp.bbQXksuEgJ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch psmdb some-name --type merge '-p={"spec": { "pause": false } }' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9yoBNrYJSU perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.bbQXksuEgJ + rm /tmp/tmp.9yoBNrYJSU /tmp/tmp.bbQXksuEgJ + return 0 + wait_for_cluster_state some-name ready + local cluster_name=some-name + local target_state=ready + echo -n 'Waiting for cluster to reach ready state' Waiting for cluster to reach ready state+ local timeout=0 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gFZGhB1m7E +++ mktemp ++ local LAST_ERR=/tmp/tmp.xO2oxnJgow ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.gFZGhB1m7E ++ cat /tmp/tmp.xO2oxnJgow ++ rm /tmp/tmp.gFZGhB1m7E /tmp/tmp.xO2oxnJgow ++ return 0 + [[ paused == ready ]] + sleep 1 + timeout=1 + echo -n . 
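Once the cluster reports paused, the test flips TLS off and resumes the cluster with two merge patches before waiting for the ready state; the JSON payloads are taken verbatim from the trace above. A standalone sketch using plain kubectl in place of the kubectl_bin wrapper (the CLUSTER variable is mine):

#!/bin/bash
set -euo pipefail

CLUSTER=some-name

# Turn TLS off; the test patches the unsafe flag together with tls.mode,
# exactly as in the trace above.
kubectl patch psmdb "${CLUSTER}" --type merge \
    -p '{"spec": { "unsafeFlags": { "tls": true }, "tls": { "mode": "disabled" } } }'

# Resume the previously paused cluster so the operator rolls the change out.
kubectl patch psmdb "${CLUSTER}" --type merge \
    -p '{"spec": { "pause": false } }'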
..........  (wait_for_cluster_state some-name ready keeps polling: every probe via the kubectl_bin retry wrapper returns "initializing", so the helper sleeps 1 s, prints a dot and advances the counter from timeout=1 through timeout=43, well under the 1500-second limit)
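Every single probe in these loops expands to the same kubectl_bin retry wrapper seen throughout the log: capture stdout and stderr into mktemp files, retry the kubectl call up to three times, then print both streams and clean up. A rough sketch of that wrapper; the exact error handling and backoff of the real helper are assumptions (this part of the log only shows successful first attempts, and a "sleep 0" retry earlier on):

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in 0 1 2; do
        set +e
        kubectl "$@" >"${LAST_OUT}" 2>"${LAST_ERR}"
        exit_status=$?
        set -e
        if [[ ${exit_status} -eq 0 ]]; then
            break
        fi
        sleep "${i}"    # assumed backoff; the real wrapper may wait differently
    done
    cat "${LAST_OUT}"
    cat "${LAST_ERR}"
    rm "${LAST_OUT}" "${LAST_ERR}"
    return ${exit_status}
}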
.+ [[ 43 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TsjZ5Jywti +++ mktemp ++ local LAST_ERR=/tmp/tmp.Xl6jMWKr4I ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.TsjZ5Jywti ++ cat /tmp/tmp.Xl6jMWKr4I ++ rm /tmp/tmp.TsjZ5Jywti /tmp/tmp.Xl6jMWKr4I ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=44 + echo -n . .+ [[ 44 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Mh4zGgtHVV +++ mktemp ++ local LAST_ERR=/tmp/tmp.Fxs7iI0jBr ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Mh4zGgtHVV ++ cat /tmp/tmp.Fxs7iI0jBr ++ rm /tmp/tmp.Mh4zGgtHVV /tmp/tmp.Fxs7iI0jBr ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=45 + echo -n . .+ [[ 45 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lH9uE5d7fk +++ mktemp ++ local LAST_ERR=/tmp/tmp.XIvUueIsyT ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.lH9uE5d7fk ++ cat /tmp/tmp.XIvUueIsyT ++ rm /tmp/tmp.lH9uE5d7fk /tmp/tmp.XIvUueIsyT ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=46 + echo -n . .+ [[ 46 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dHZzCe2Bcu +++ mktemp ++ local LAST_ERR=/tmp/tmp.V9xRmQOn4A ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dHZzCe2Bcu ++ cat /tmp/tmp.V9xRmQOn4A ++ rm /tmp/tmp.dHZzCe2Bcu /tmp/tmp.V9xRmQOn4A ++ return 0 + [[ initializing == ready ]] + sleep 1 + timeout=47 + echo -n . .+ [[ 47 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4TpxiAPkWQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.1uYhFaKaUM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.4TpxiAPkWQ ++ cat /tmp/tmp.1uYhFaKaUM ++ rm /tmp/tmp.4TpxiAPkWQ /tmp/tmp.1uYhFaKaUM ++ return 0 + [[ ready == ready ]] + echo + compare_kubectl statefulset/some-name-rs0 -tls-disabled + local resource=statefulset/some-name-rs0 + local postfix=-tls-disabled + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-rs0-tls-disabled.yml + local new_result=/tmp/tmp.pCG7nqFZEo/statefulset_some-name-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-rs0-tls-disabled-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-rs0 + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. 
| select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-1728", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.UXgUSJpT6D ++ mktemp + local LAST_ERR=/tmp/tmp.Mx9T74nrCv + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/some-name-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.UXgUSJpT6D + cat /tmp/tmp.Mx9T74nrCv + rm /tmp/tmp.UXgUSJpT6D /tmp/tmp.Mx9T74nrCv + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.pCG7nqFZEo/statefulset_some-name-rs0.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.pCG7nqFZEo/statefulset_some-name-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.pCG7nqFZEo/statefulset_some-name-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-rs0-tls-disabled.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-rs0-tls-disabled.yml /tmp/tmp.pCG7nqFZEo/statefulset_some-name-rs0.yml + compare_kubectl statefulset/some-name-cfg -tls-disabled + local resource=statefulset/some-name-cfg + local postfix=-tls-disabled + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-cfg-tls-disabled.yml + local new_result=/tmp/tmp.pCG7nqFZEo/statefulset_some-name-cfg.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-cfg-tls-disabled-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-cfg + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. 
| select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-1728", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.Dck2ZoWAsj ++ mktemp + local LAST_ERR=/tmp/tmp.LdxXrBakMA + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/some-name-cfg + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Dck2ZoWAsj + cat /tmp/tmp.LdxXrBakMA + rm /tmp/tmp.Dck2ZoWAsj /tmp/tmp.LdxXrBakMA + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.pCG7nqFZEo/statefulset_some-name-cfg.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.pCG7nqFZEo/statefulset_some-name-cfg.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.pCG7nqFZEo/statefulset_some-name-cfg.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-cfg-tls-disabled.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-cfg-tls-disabled.yml /tmp/tmp.pCG7nqFZEo/statefulset_some-name-cfg.yml + compare_kubectl statefulset/some-name-mongos -tls-disabled + local resource=statefulset/some-name-mongos + local postfix=-tls-disabled + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-mongos-tls-disabled.yml + local new_result=/tmp/tmp.pCG7nqFZEo/statefulset_some-name-mongos.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-mongos-tls-disabled-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-mongos + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. 
| select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-1728", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.ZC7eAg7lH3 ++ mktemp + local LAST_ERR=/tmp/tmp.iZMGOg40sa + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/some-name-mongos + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ZC7eAg7lH3 + cat /tmp/tmp.iZMGOg40sa + rm /tmp/tmp.ZC7eAg7lH3 /tmp/tmp.iZMGOg40sa + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.pCG7nqFZEo/statefulset_some-name-mongos.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.pCG7nqFZEo/statefulset_some-name-mongos.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.pCG7nqFZEo/statefulset_some-name-mongos.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-mongos-tls-disabled.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-mongos-tls-disabled.yml /tmp/tmp.pCG7nqFZEo/statefulset_some-name-mongos.yml --- /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1581/e2e-tests/tls-issue-cert-manager/compare/statefulset_some-name-mongos-tls-disabled.yml 2024-07-01 01:43:28.773767991 +0000 +++ /tmp/tmp.pCG7nqFZEo/statefulset_some-name-mongos.yml 2024-07-01 03:54:52.247751983 +0000 @@ -2,7 +2,7 @@ kind: StatefulSet metadata: annotations: {} - generation: 7 + generation: 6 labels: app.kubernetes.io/component: mongos app.kubernetes.io/instance: some-name