Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/logs/non-voting-and-hidden.log grep: warning: stray \ before - Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 + cluster=some-name + create_infra non-voting-and-hidden-7583 + local ns=non-voting-and-hidden-7583 + [[ 1 == 1 ]] + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.XWwZNdnA0o ++ mktemp + local LAST_ERR=/tmp/tmp.Pzb9G0YE2H + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.XWwZNdnA0o customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.Pzb9G0YE2H + rm /tmp/tmp.XWwZNdnA0o /tmp/tmp.Pzb9G0YE2H + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/crd.yaml ++ grep -v '\-\-\-' grep: warning: stray \ before - grep: warning: stray \ before - + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n non-voting-and-hidden-2669 backup-logical --type=merge -p '{"metadata":{"finalizers":[]}}' perconaservermongodbbackup.psmdb.percona.com/backup-logical patched + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n non-voting-and-hidden-2669 backup-physical --type=merge -p '{"metadata":{"finalizers":[]}}' perconaservermongodbbackup.psmdb.percona.com/backup-physical patched + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.o0IJU3qeVB ++ mktemp + local LAST_ERR=/tmp/tmp.6yBKANUoqD + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.o0IJU3qeVB customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com condition met + cat /tmp/tmp.6yBKANUoqD + rm /tmp/tmp.o0IJU3qeVB /tmp/tmp.6yBKANUoqD + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + grep -v NAMESPACE + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + xargs -L 1 sh 
-xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.O58L0FEbet ++ mktemp + local LAST_ERR=/tmp/tmp.vtzBtJH4kN + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.O58L0FEbet + cat /tmp/tmp.vtzBtJH4kN + rm /tmp/tmp.O58L0FEbet /tmp/tmp.vtzBtJH4kN + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.bX5OHpCkRw ++ mktemp + local LAST_ERR=/tmp/tmp.LQ5M1kvOVB + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.bX5OHpCkRw + cat /tmp/tmp.LQ5M1kvOVB + rm /tmp/tmp.bX5OHpCkRw /tmp/tmp.LQ5M1kvOVB + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.81e0ubi8Sz ++ mktemp + local LAST_ERR=/tmp/tmp.QPF24Ds3jE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.81e0ubi8Sz clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.QPF24Ds3jE + rm /tmp/tmp.81e0ubi8Sz /tmp/tmp.QPF24Ds3jE + return 0 + check_crd_for_deletion PR-2125-8ebcb80f + local git_tag=PR-2125-8ebcb80f ++ yq eval .metadata.name ++ curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/PR-2125-8ebcb80f/deploy/crd.yaml ++ /usr/sbin/sed s/---//g ++ /usr/sbin/sed ':a;N;$!ba;s/\n/ /g' + for crd_name in $(curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml | yq eval '.metadata.name' | $sed 's/---//g' | $sed ':a;N;$!ba;s/\n/ /g') ++ kubectl_bin get crd/null -o 'jsonpath={.status.conditions[-1].type}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.r1Lr3a5Ryv +++ mktemp ++ local LAST_ERR=/tmp/tmp.q0vwbD9wUS ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 
0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.r1Lr3a5Ryv ++ cat /tmp/tmp.q0vwbD9wUS Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 0 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.r1Lr3a5Ryv ++ cat /tmp/tmp.q0vwbD9wUS Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 4 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.r1Lr3a5Ryv ++ cat /tmp/tmp.q0vwbD9wUS Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 8 ++ cat /tmp/tmp.r1Lr3a5Ryv ++ cat /tmp/tmp.q0vwbD9wUS Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ rm /tmp/tmp.r1Lr3a5Ryv /tmp/tmp.q0vwbD9wUS ++ return 1 + [[ '' == \T\e\r\m\i\n\a\t\i\n\g ]] + '[' -n psmdb-operator ']' + create_namespace psmdb-operator + local namespace=psmdb-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ sed s/NAMESPACE// ++ awk '-F ' '{print $2}' + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ grep chaos-mesh.org ++ kubectl get crd ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces psmdb-operator' + set +o xtrace + xargs kubectl delete ns ----------------------------------------------------------------------------------- cleaned up old namespaces psmdb-operator 
----------------------------------------------------------------------------------- + kubectl_bin delete namespace psmdb-operator --ignore-not-found ++ mktemp ++ mktemp egrep: warning: egrep is obsolescent; using grep -E + local LAST_OUT=/tmp/tmp.LxYjkwCGLY + local LAST_OUT=/tmp/tmp.mZg45uO8Q0 ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.NBisBbRWRW + local exit_status=0 + local timeout=4 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.1oHgAxNqiS + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get ns + for i in $(seq 0 2) + set +e + kubectl delete namespace psmdb-operator --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LxYjkwCGLY + cat /tmp/tmp.NBisBbRWRW + rm /tmp/tmp.LxYjkwCGLY /tmp/tmp.NBisBbRWRW + return 0 namespace "cert-manager" deleted namespace "non-voting-and-hidden-2669" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.mZg45uO8Q0 namespace "psmdb-operator" deleted + cat /tmp/tmp.1oHgAxNqiS + rm /tmp/tmp.mZg45uO8Q0 /tmp/tmp.1oHgAxNqiS + return 0 + kubectl_bin wait --for=delete namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.A8kewgMwDB ++ mktemp + local LAST_ERR=/tmp/tmp.FJ5njn44VO + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.A8kewgMwDB + cat /tmp/tmp.FJ5njn44VO + rm /tmp/tmp.A8kewgMwDB /tmp/tmp.FJ5njn44VO + return 0 + desc 'create namespace psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.iIKwi7HeoF ++ mktemp + local LAST_ERR=/tmp/tmp.9HEKyC9J76 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.iIKwi7HeoF namespace/psmdb-operator created + cat /tmp/tmp.9HEKyC9J76 + rm /tmp/tmp.iIKwi7HeoF /tmp/tmp.9HEKyC9J76 + return 0 + set_kube_ctx psmdb-operator + local namespace=psmdb-operator ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.FEc8MAlQSX +++ mktemp ++ local LAST_ERR=/tmp/tmp.vGvW9qDNeL ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.FEc8MAlQSX ++ cat /tmp/tmp.vGvW9qDNeL ++ rm /tmp/tmp.FEc8MAlQSX /tmp/tmp.vGvW9qDNeL ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2125-8ebcb80f-3-cluster1 --namespace=psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.kqfm337Edx ++ mktemp + local LAST_ERR=/tmp/tmp.AgU77tKbdO + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2125-8ebcb80f-3-cluster1 --namespace=psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.kqfm337Edx Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-2125-8ebcb80f-3-cluster1" modified. 
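note: the xargs/patch pipeline near the top of this log is the suite's guard against stuck deletions. Custom objects left over from a previous run carry finalizers, so "kubectl delete -f crd.yaml --wait=false" alone would leave the CRDs Terminating forever. The pattern, isolated:

    # list every leftover object of the CRD and clear its finalizers via a merge patch
    kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide \
        | grep -v NAMESPACE \
        | xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'

A quirk visible above: once a resource type is already gone, "kubectl get" produces no rows, xargs still invokes the command once with no arguments, and $0 falls back to the shell's own name, hence the bogus "-n sh" patch calls; the suite tolerates the failure ("+ :") and moves on.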
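note: nearly every command in this log runs through the suite's kubectl_bin wrapper, which is what produces the recurring mktemp / LAST_OUT / LAST_ERR / "seq 0 2" boilerplate. A condensed sketch of the idiom, assuming a simplified form (the real helper, presumably in the suite's shared functions file, carries extra diagnostics):

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status i
        LAST_OUT=$(mktemp); LAST_ERR=$(mktemp)
        # up to 3 attempts with a growing back-off (the "sleep 0 / 4 / 8" seen above)
        for i in $(seq 0 2); do
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            [ "$exit_status" -eq 0 ] && break
            sleep $((i * 4))
        done
        cat "$LAST_OUT"; cat "$LAST_ERR"
        rm "$LAST_OUT" "$LAST_ERR"
        return $exit_status
    }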
+ cat /tmp/tmp.AgU77tKbdO + rm /tmp/tmp.kqfm337Edx /tmp/tmp.AgU77tKbdO + return 0 + deploy_operator + desc 'start PSMDB operator: perconalab/percona-server-mongodb-operator:PR-2125-8ebcb80f' + set +o xtrace ----------------------------------------------------------------------------------- start PSMDB operator: perconalab/percona-server-mongodb-operator:PR-2125-8ebcb80f ----------------------------------------------------------------------------------- + local cr_file + '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/conf/crd.yaml ']' + cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/crd.yaml + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.AWSW67DmJD ++ mktemp + local LAST_ERR=/tmp/tmp.orxZs2j93W + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.AWSW67DmJD customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied + cat /tmp/tmp.orxZs2j93W + rm /tmp/tmp.AWSW67DmJD /tmp/tmp.orxZs2j93W + return 0 + '[' -n psmdb-operator ']' + apply_rbac cw-rbac + local operator_namespace=psmdb-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: psmdb-operator^' + kubectl_bin apply -n psmdb-operator -f - ++ mktemp + local LAST_OUT=/tmp/tmp.BRc2oUPLm2 ++ mktemp + local LAST_ERR=/tmp/tmp.4ULfYv6hWW + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.BRc2oUPLm2 clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created + cat /tmp/tmp.4ULfYv6hWW + rm /tmp/tmp.BRc2oUPLm2 /tmp/tmp.4ULfYv6hWW + return 0 + yq eval ' (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-2125-8ebcb80f") | ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") | ((.. 
| select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/cw-operator.yaml + kubectl_bin apply -n psmdb-operator -f - ++ mktemp + local LAST_OUT=/tmp/tmp.AsKbt2D1rD ++ mktemp + local LAST_ERR=/tmp/tmp.DbmzmwkPKe + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.AsKbt2D1rD deployment.apps/percona-server-mongodb-operator created + cat /tmp/tmp.DbmzmwkPKe + rm /tmp/tmp.AsKbt2D1rD /tmp/tmp.DbmzmwkPKe + return 0 + sleep 20 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.8K3t8XyZ9j +++ mktemp ++ local LAST_ERR=/tmp/tmp.UDXNgHREVn ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8K3t8XyZ9j ++ cat /tmp/tmp.UDXNgHREVn ++ rm /tmp/tmp.8K3t8XyZ9j /tmp/tmp.UDXNgHREVn ++ return 0 + wait_operator_pod percona-server-mongodb-operator-84495cbcf7-pvdnk + local pod=percona-server-mongodb-operator-84495cbcf7-pvdnk + set +o xtrace waiting for pod/percona-server-mongodb-operator-84495cbcf7-pvdnk to be ready.OK + echo 'Print operator info from log' Print operator info from log + grep 'Manager starting up' ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.xaG8NDf6Wx +++ mktemp ++ local LAST_ERR=/tmp/tmp.5BEI2LGmQm ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xaG8NDf6Wx ++ cat /tmp/tmp.5BEI2LGmQm ++ rm /tmp/tmp.xaG8NDf6Wx /tmp/tmp.5BEI2LGmQm ++ return 0 + kubectl_bin logs -n psmdb-operator percona-server-mongodb-operator-84495cbcf7-pvdnk ++ mktemp + local LAST_OUT=/tmp/tmp.MaNl8nCPrD ++ mktemp + local LAST_ERR=/tmp/tmp.sqeyuTjpDv + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl logs -n psmdb-operator percona-server-mongodb-operator-84495cbcf7-pvdnk + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.MaNl8nCPrD + cat /tmp/tmp.sqeyuTjpDv + rm /tmp/tmp.MaNl8nCPrD /tmp/tmp.sqeyuTjpDv + return 0 2025-12-10T13:22:14.038Z INFO setup Manager starting up {"gitCommit": "8ebcb80f1012f36e90d7464b396a5e33442e27be", "gitBranch": "PR-2125-8ebcb80f", "buildTime": "", "goVersion": "go1.25.5", "os": "linux", "arch": "amd64"} + create_namespace non-voting-and-hidden-7583 + local namespace=non-voting-and-hidden-7583 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ awk '-F ' '{print $2}' ++ tail -n1 ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep 
chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME' + '[' -n '' ']' + awk '{print$1}' + desc 'cleaned up old namespaces non-voting-and-hidden-7583' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces non-voting-and-hidden-7583 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace non-voting-and-hidden-7583 --ignore-not-found ++ mktemp ++ mktemp egrep: warning: egrep is obsolescent; using grep -E + local LAST_OUT=/tmp/tmp.CYMnmKi8vX ++ mktemp + local LAST_OUT=/tmp/tmp.C1VXCcJGZ1 ++ mktemp + local LAST_ERR=/tmp/tmp.Wcj9fvY1zB + local exit_status=0 + local timeout=4 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.X4kmSAOnBV + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get ns + for i in $(seq 0 2) + set +e + kubectl delete namespace non-voting-and-hidden-7583 --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.CYMnmKi8vX + cat /tmp/tmp.Wcj9fvY1zB + rm /tmp/tmp.CYMnmKi8vX /tmp/tmp.Wcj9fvY1zB + return 0 error: resource(s) were provided, but no name was specified + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.C1VXCcJGZ1 + cat /tmp/tmp.X4kmSAOnBV + rm /tmp/tmp.C1VXCcJGZ1 /tmp/tmp.X4kmSAOnBV + return 0 + kubectl_bin wait --for=delete namespace non-voting-and-hidden-7583 ++ mktemp + local LAST_OUT=/tmp/tmp.hKXbcoTuGD ++ mktemp + local LAST_ERR=/tmp/tmp.PL0b8lYBJG + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete namespace non-voting-and-hidden-7583 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.hKXbcoTuGD + cat /tmp/tmp.PL0b8lYBJG + rm /tmp/tmp.hKXbcoTuGD /tmp/tmp.PL0b8lYBJG + return 0 + desc 'create namespace non-voting-and-hidden-7583' + set +o xtrace ----------------------------------------------------------------------------------- create namespace non-voting-and-hidden-7583 
----------------------------------------------------------------------------------- + kubectl_bin create namespace non-voting-and-hidden-7583 ++ mktemp + local LAST_OUT=/tmp/tmp.hbc5KkGIyf ++ mktemp + local LAST_ERR=/tmp/tmp.SwjqT1ZzKI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace non-voting-and-hidden-7583 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.hbc5KkGIyf namespace/non-voting-and-hidden-7583 created + cat /tmp/tmp.SwjqT1ZzKI + rm /tmp/tmp.hbc5KkGIyf /tmp/tmp.SwjqT1ZzKI + return 0 + set_kube_ctx non-voting-and-hidden-7583 + local namespace=non-voting-and-hidden-7583 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.dZCOP45Oe3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Nl1u2HIRs0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dZCOP45Oe3 ++ cat /tmp/tmp.Nl1u2HIRs0 ++ rm /tmp/tmp.dZCOP45Oe3 /tmp/tmp.Nl1u2HIRs0 ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2125-8ebcb80f-3-cluster1 --namespace=non-voting-and-hidden-7583 ++ mktemp + local LAST_OUT=/tmp/tmp.NN5dPoNZe6 ++ mktemp + local LAST_ERR=/tmp/tmp.yM194Ito9p + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2125-8ebcb80f-3-cluster1 --namespace=non-voting-and-hidden-7583 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.NN5dPoNZe6 Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-2125-8ebcb80f-3-cluster1" modified. + cat /tmp/tmp.yM194Ito9p + rm /tmp/tmp.NN5dPoNZe6 /tmp/tmp.yM194Ito9p + return 0 + deploy_cert_manager + desc 'deploy cert manager' + set +o xtrace ----------------------------------------------------------------------------------- deploy cert manager ----------------------------------------------------------------------------------- + kubectl_bin create namespace cert-manager ++ mktemp + local LAST_OUT=/tmp/tmp.LyLMnSvaiB ++ mktemp + local LAST_ERR=/tmp/tmp.CNBFxdjE17 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace cert-manager + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LyLMnSvaiB namespace/cert-manager created + cat /tmp/tmp.CNBFxdjE17 + rm /tmp/tmp.LyLMnSvaiB /tmp/tmp.CNBFxdjE17 + return 0 + kubectl_bin label namespace cert-manager certmanager.k8s.io/disable-validation=true ++ mktemp + local LAST_OUT=/tmp/tmp.jtsMnJRPCP ++ mktemp + local LAST_ERR=/tmp/tmp.fLYu2daw9m + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl label namespace cert-manager certmanager.k8s.io/disable-validation=true + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.jtsMnJRPCP namespace/cert-manager labeled + cat /tmp/tmp.fLYu2daw9m + rm /tmp/tmp.jtsMnJRPCP /tmp/tmp.fLYu2daw9m + return 0 + kubectl_bin apply -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml --validate=false ++ mktemp + local LAST_OUT=/tmp/tmp.guCU2ZuP63 ++ mktemp + local LAST_ERR=/tmp/tmp.8byKf4xdwj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml --validate=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 
']' + break + cat /tmp/tmp.guCU2ZuP63 namespace/cert-manager configured customresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io unchanged serviceaccount/cert-manager-cainjector created serviceaccount/cert-manager created serviceaccount/cert-manager-webhook created clusterrole.rbac.authorization.k8s.io/cert-manager-cainjector unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-cluster-view unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-view unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-edit unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews unchanged role.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged role.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged role.rbac.authorization.k8s.io/cert-manager-tokenrequest created role.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created rolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged rolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged rolebinding.rbac.authorization.k8s.io/cert-manager-tokenrequest created rolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created service/cert-manager-cainjector created service/cert-manager created service/cert-manager-webhook created deployment.apps/cert-manager-cainjector created deployment.apps/cert-manager created deployment.apps/cert-manager-webhook created 
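note: the cert-manager rollout here is the stock static-manifest install. The certmanager.k8s.io/disable-validation=true label exempts the namespace from cert-manager's own admission webhook, and --validate=false skips client-side schema validation because the manifest ships CRDs and resources that depend on them in a single apply. Condensed, the step is:

    kubectl create namespace cert-manager
    kubectl label namespace cert-manager certmanager.k8s.io/disable-validation=true
    kubectl apply --validate=false \
        -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml
    kubectl -n cert-manager wait pod -l app.kubernetes.io/instance=cert-manager --for=condition=ready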
mutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured validatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured + cat /tmp/tmp.8byKf4xdwj Warning: resource namespaces/cert-manager is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. + rm /tmp/tmp.guCU2ZuP63 /tmp/tmp.8byKf4xdwj + return 0 + kubectl_bin -n cert-manager wait pod -l app.kubernetes.io/instance=cert-manager --for=condition=ready ++ mktemp + local LAST_OUT=/tmp/tmp.7qwA3uVyWw ++ mktemp + local LAST_ERR=/tmp/tmp.1vHzOlBBZy + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl -n cert-manager wait pod -l app.kubernetes.io/instance=cert-manager --for=condition=ready + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.7qwA3uVyWw pod/cert-manager-cainjector-5dc9c8b4f7-7m5bh condition met pod/cert-manager-df4b69479-xjqcm condition met pod/cert-manager-webhook-769bbb594d-r7clk condition met + cat /tmp/tmp.1vHzOlBBZy + rm /tmp/tmp.7qwA3uVyWw /tmp/tmp.1vHzOlBBZy + return 0 + sleep 120 + apply_s3_storage_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.474P8RbYM6 ++ mktemp + local LAST_ERR=/tmp/tmp.P6KqlDKtox + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.474P8RbYM6 secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created secret/gcp-cs-sa-key-secret created + cat /tmp/tmp.P6KqlDKtox + rm /tmp/tmp.474P8RbYM6 /tmp/tmp.P6KqlDKtox + return 0 + deploy_minio + desc 'install Minio' + set +o xtrace ----------------------------------------------------------------------------------- install Minio ----------------------------------------------------------------------------------- + helm uninstall minio-service Error: uninstall: Release not loaded: minio-service: release: not found + : + helm repo remove minio "minio" has been removed from your repositories + helm repo add minio https://charts.min.io/ "minio" has been added to your repositories + retry 10 60 helm install minio-service --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio + local max=10 + local delay=60 + shift 2 + local n=1 + helm install minio-service --version 5.4.0 
--set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio NAME: minio-service LAST DEPLOYED: Wed Dec 10 13:25:15 2025 NAMESPACE: non-voting-and-hidden-7583 STATUS: deployed REVISION: 1 TEST SUITE: None NOTES: MinIO can be accessed via port 9000 on the following DNS name from within your cluster: minio-service.non-voting-and-hidden-7583.cluster.local To access MinIO from localhost, run the below commands: 1. export POD_NAME=$(kubectl get pods --namespace non-voting-and-hidden-7583 -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}") 2. kubectl port-forward $POD_NAME 9000 --namespace non-voting-and-hidden-7583 Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/ You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client: 1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart 2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace non-voting-and-hidden-7583 minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace non-voting-and-hidden-7583 minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000 3. mc ls minio-service-local ++ kubectl_bin get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yuxb7fcBsW +++ mktemp ++ local LAST_ERR=/tmp/tmp.ekng7seX0I ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.yuxb7fcBsW ++ cat /tmp/tmp.ekng7seX0I ++ rm /tmp/tmp.yuxb7fcBsW /tmp/tmp.ekng7seX0I ++ return 0 + MINIO_POD=minio-service-d9589b474-txnv4 + wait_pod minio-service-d9589b474-txnv4 + local pod=minio-service-d9589b474-txnv4 + set +o xtrace waiting for pod/minio-service-d9589b474-txnv4 to be ready.OK + '[' -n psmdb-operator ']' + kubectl_bin create svc -n psmdb-operator externalname minio-service --external-name=minio-service.non-voting-and-hidden-7583.svc.cluster.local --tcp=9000 ++ mktemp + local LAST_OUT=/tmp/tmp.DZPSHCrgfJ ++ mktemp + local LAST_ERR=/tmp/tmp.PkWSVR1dYQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create svc -n psmdb-operator externalname minio-service --external-name=minio-service.non-voting-and-hidden-7583.svc.cluster.local --tcp=9000 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DZPSHCrgfJ service/minio-service created + cat /tmp/tmp.PkWSVR1dYQ + rm /tmp/tmp.DZPSHCrgfJ /tmp/tmp.PkWSVR1dYQ + return 0 + create_minio_bucket operator-testing + local bucket=operator-testing + kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing' ++ mktemp + local LAST_OUT=/tmp/tmp.KKvBfhPTAd ++ mktemp + local LAST_ERR=/tmp/tmp.BHQXL5U6CS + local exit_status=0 
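note: two details of the MinIO setup are easy to miss in the noise: the ExternalName service that makes the test namespace's MinIO resolvable from the operator's namespace, and the throwaway awscli pod used to create the test bucket from inside the cluster. Isolated:

    # alias minio-service into the operator namespace
    kubectl create service externalname minio-service -n psmdb-operator \
        --external-name=minio-service.non-voting-and-hidden-7583.svc.cluster.local --tcp=9000
    # create the bucket with a one-shot pod (credentials match the helm values above)
    kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c \
        'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key \
         AWS_DEFAULT_REGION=us-east-1 aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing'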
+ local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.KKvBfhPTAd make_bucket: operator-testing pod "aws-cli" deleted from non-voting-and-hidden-7583 namespace + cat /tmp/tmp.BHQXL5U6CS All commands and output from this session will be recorded in container logs, including credentials and sensitive information passed through the command prompt. If you don't see a command prompt, try pressing enter. warning: couldn't attach to pod/aws-cli, falling back to streaming logs: Internal error occurred: unable to upgrade connection: container aws-cli not found in pod aws-cli_non-voting-and-hidden-7583 + rm /tmp/tmp.KKvBfhPTAd /tmp/tmp.BHQXL5U6CS + return 0 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/conf/client.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.6ThW9XmpMh ++ mktemp + local LAST_ERR=/tmp/tmp.PjkAwaDNdH + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/conf/client.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6ThW9XmpMh deployment.apps/psmdb-client created secret/some-users created + cat /tmp/tmp.PjkAwaDNdH + rm /tmp/tmp.6ThW9XmpMh /tmp/tmp.PjkAwaDNdH + return 0 + desc 'Creating PSMDB cluster with non-voting + hidden members' + set +o xtrace ----------------------------------------------------------------------------------- Creating PSMDB cluster with non-voting + hidden members ----------------------------------------------------------------------------------- + log 'create PSMDB cluster some-name' + set +o xtrace [2025-12-10T13:25:54+0000] create PSMDB cluster some-name + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/conf/some-name.yml + yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod8.0"' + yq eval '(.spec | select(has("pmm"))).pmm.image = "percona/pmm-client:2.44.1-1"' + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-2125-8ebcb80f"' + yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' + yq eval '.spec.upgradeOptions.apply="Never"' ++ mktemp + local LAST_OUT=/tmp/tmp.LBQJiAJUt2 ++ mktemp + local LAST_ERR=/tmp/tmp.uNrg0vd6Zr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LBQJiAJUt2 perconaservermongodb.psmdb.percona.com/some-name created + cat /tmp/tmp.uNrg0vd6Zr + rm /tmp/tmp.LBQJiAJUt2 /tmp/tmp.uNrg0vd6Zr + return 0 + log 'check if Pods are started' + set +o xtrace [2025-12-10T13:25:56+0000] 
check if Pods are started + wait_for_running some-name-rs0 3 false + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=false + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready........OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready......OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CTqZbwGNrD +++ mktemp ++ local LAST_ERR=/tmp/tmp.GsvPxJlsra ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.CTqZbwGNrD ++ cat /tmp/tmp.GsvPxJlsra ++ rm /tmp/tmp.CTqZbwGNrD /tmp/tmp.GsvPxJlsra ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.....OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GMMAYobOiZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.R0NgvbHda0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.GMMAYobOiZ ++ cat /tmp/tmp.R0NgvbHda0 ++ rm /tmp/tmp.GMMAYobOiZ /tmp/tmp.R0NgvbHda0 ++ return 0 + [[ true == \t\r\u\e ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.size}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8izju76wUO +++ mktemp ++ local LAST_ERR=/tmp/tmp.i36JZsrQYe ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.size}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8izju76wUO ++ cat /tmp/tmp.i36JZsrQYe ++ rm /tmp/tmp.8izju76wUO /tmp/tmp.i36JZsrQYe ++ return 0 + last_pod=0 ++ seq 0 0 + for i in $(seq 0 $last_pod) + wait_pod some-name-rs0-nv-0 + local pod=some-name-rs0-nv-0 + set +o xtrace waiting for pod/some-name-rs0-nv-0 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w39mmCLxNX +++ mktemp ++ local LAST_ERR=/tmp/tmp.wJTMgQeSwi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.w39mmCLxNX ++ cat /tmp/tmp.wJTMgQeSwi ++ rm /tmp/tmp.w39mmCLxNX /tmp/tmp.wJTMgQeSwi ++ return 0 + [[ true == \t\r\u\e ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.size}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GCajfGjEj2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.5WQJEXMDDo ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb 
some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.size}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.GCajfGjEj2 ++ cat /tmp/tmp.5WQJEXMDDo ++ rm /tmp/tmp.GCajfGjEj2 /tmp/tmp.5WQJEXMDDo ++ return 0 + last_pod=1 ++ seq 0 1 + for i in $(seq 0 $last_pod) + wait_pod some-name-rs0-hidden-0 + local pod=some-name-rs0-hidden-0 + set +o xtrace waiting for pod/some-name-rs0-hidden-0 to be ready.OK + for i in $(seq 0 $last_pod) + wait_pod some-name-rs0-hidden-1 + local pod=some-name-rs0-hidden-1 + set +o xtrace waiting for pod/some-name-rs0-hidden-1 to be ready.OK + sleep 10 + [[ false == \t\r\u\e ]] + wait_for_cluster_state some-name ready + local cluster_name=some-name + local target_state=ready + echo -n 'Waiting for psmdb/some-name to reach ready state' Waiting for psmdb/some-name to reach ready state+ local timeout=0 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BqOYqfjm6B +++ mktemp ++ local LAST_ERR=/tmp/tmp.EsHira6DaT ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.BqOYqfjm6B ++ cat /tmp/tmp.EsHira6DaT ++ rm /tmp/tmp.BqOYqfjm6B /tmp/tmp.EsHira6DaT ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=1 + echo -n . .+ [[ 1 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tEYGfnuaPi +++ mktemp ++ local LAST_ERR=/tmp/tmp.2ulgu2DPSl ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.tEYGfnuaPi ++ cat /tmp/tmp.2ulgu2DPSl ++ rm /tmp/tmp.tEYGfnuaPi /tmp/tmp.2ulgu2DPSl ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=2 + echo -n . .+ [[ 2 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ODPuTC4Vj2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.AikGgAZzEg ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ODPuTC4Vj2 ++ cat /tmp/tmp.AikGgAZzEg ++ rm /tmp/tmp.ODPuTC4Vj2 /tmp/tmp.AikGgAZzEg ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=3 + echo -n . .+ [[ 3 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.N3SNQBgPKp +++ mktemp ++ local LAST_ERR=/tmp/tmp.pwvIV2kJGg ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.N3SNQBgPKp ++ cat /tmp/tmp.pwvIV2kJGg ++ rm /tmp/tmp.N3SNQBgPKp /tmp/tmp.pwvIV2kJGg ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=4 + echo -n . 
.+ [[ 4 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TYXDVMOAEj +++ mktemp ++ local LAST_ERR=/tmp/tmp.rvTGz3gjkt ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.TYXDVMOAEj ++ cat /tmp/tmp.rvTGz3gjkt ++ rm /tmp/tmp.TYXDVMOAEj /tmp/tmp.rvTGz3gjkt ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=5 + echo -n . .+ [[ 5 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ujrU82Qetq +++ mktemp ++ local LAST_ERR=/tmp/tmp.O476R7dJT2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ujrU82Qetq ++ cat /tmp/tmp.O476R7dJT2 ++ rm /tmp/tmp.ujrU82Qetq /tmp/tmp.O476R7dJT2 ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=6 + echo -n . .+ [[ 6 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uLw9xy4CZ6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.F9n4uvtbCX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.uLw9xy4CZ6 ++ cat /tmp/tmp.F9n4uvtbCX ++ rm /tmp/tmp.uLw9xy4CZ6 /tmp/tmp.F9n4uvtbCX ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=7 + echo -n . .+ [[ 7 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uw4SbO5gmP +++ mktemp ++ local LAST_ERR=/tmp/tmp.Wmcx3RCyIM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.uw4SbO5gmP ++ cat /tmp/tmp.Wmcx3RCyIM ++ rm /tmp/tmp.uw4SbO5gmP /tmp/tmp.Wmcx3RCyIM ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=8 + echo -n . .+ [[ 8 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ex2s15vMnP +++ mktemp ++ local LAST_ERR=/tmp/tmp.3m8YN0xFiB ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ex2s15vMnP ++ cat /tmp/tmp.3m8YN0xFiB ++ rm /tmp/tmp.ex2s15vMnP /tmp/tmp.3m8YN0xFiB ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=9 + echo -n . .+ [[ 9 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Faz6vHpmL2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.KzCIEacoQ0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Faz6vHpmL2 ++ cat /tmp/tmp.KzCIEacoQ0 ++ rm /tmp/tmp.Faz6vHpmL2 /tmp/tmp.KzCIEacoQ0 ++ return 0 + [[ initializing =~ ready ]] + sleep 1 + timeout=10 + echo -n . 
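note: the block repeating through this stretch is wait_for_cluster_state polling the custom resource roughly once per second. Stripped of the retry wrapper, the loop is approximately (a sketch; the suite's error handling differs):

    # poll .status.state until it matches "ready"; bail out after 1500 iterations
    timeout=0
    until [[ $(kubectl get psmdb some-name -o 'jsonpath={.status.state}') =~ ready ]]; do
        sleep 1
        timeout=$((timeout + 1))
        echo -n .
        [[ $timeout -gt 1500 ]] && { echo "timeout waiting for psmdb/some-name"; exit 1; }
    done

In this run the state flips from initializing to ready after ten polls (13:27:45).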
.+ [[ 10 -gt 1500 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ho6b7TvBtb +++ mktemp ++ local LAST_ERR=/tmp/tmp.4qqg56uHJg ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Ho6b7TvBtb ++ cat /tmp/tmp.4qqg56uHJg ++ rm /tmp/tmp.Ho6b7TvBtb /tmp/tmp.4qqg56uHJg ++ return 0 + [[ ready =~ ready ]] + echo + log 'psmdb/some-name is ready: OK' + set +o xtrace [2025-12-10T13:27:45+0000] psmdb/some-name is ready: OK + log 'check if statefulsets created with expected config' + set +o xtrace [2025-12-10T13:27:45+0000] check if statefulsets created with expected config + compare_kubectl statefulset/some-name-rs0 + local resource=statefulset/some-name-rs0 + local postfix= + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/compare/statefulset_some-name-rs0.yml + local new_result=/tmp/tmp.6L5peFLawB/statefulset_some-name-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/compare/statefulset_some-name-rs0-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-rs0 + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("non-voting-and-hidden-7583", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.ANN5RYAV1e ++ mktemp + local LAST_ERR=/tmp/tmp.xqyb0fdw5f + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/some-name-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ANN5RYAV1e + cat /tmp/tmp.xqyb0fdw5f + rm /tmp/tmp.ANN5RYAV1e /tmp/tmp.xqyb0fdw5f + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.6L5peFLawB/statefulset_some-name-rs0.yml + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.6L5peFLawB/statefulset_some-name-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.6L5peFLawB/statefulset_some-name-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/compare/statefulset_some-name-rs0.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/compare/statefulset_some-name-rs0.yml /tmp/tmp.6L5peFLawB/statefulset_some-name-rs0.yml + log 'compare_kubectl: statefulset/some-name-rs0 OK' + set +o xtrace [2025-12-10T13:27:46+0000] compare_kubectl: statefulset/some-name-rs0 OK + compare_kubectl statefulset/some-name-rs0-nv + local resource=statefulset/some-name-rs0-nv + local postfix= + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/compare/statefulset_some-name-rs0-nv.yml + local new_result=/tmp/tmp.6L5peFLawB/statefulset_some-name-rs0-nv.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/compare/statefulset_some-name-rs0-nv-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-rs0-nv + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. 
| select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("non-voting-and-hidden-7583", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.zRm6bKoWAk ++ mktemp + local LAST_ERR=/tmp/tmp.8KSup4SC9G + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/some-name-rs0-nv + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.zRm6bKoWAk + cat /tmp/tmp.8KSup4SC9G + rm /tmp/tmp.zRm6bKoWAk /tmp/tmp.8KSup4SC9G + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.6L5peFLawB/statefulset_some-name-rs0-nv.yml + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.6L5peFLawB/statefulset_some-name-rs0-nv.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.6L5peFLawB/statefulset_some-name-rs0-nv.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/compare/statefulset_some-name-rs0-nv.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/compare/statefulset_some-name-rs0-nv.yml /tmp/tmp.6L5peFLawB/statefulset_some-name-rs0-nv.yml + log 'compare_kubectl: statefulset/some-name-rs0-nv OK' + set +o xtrace [2025-12-10T13:27:47+0000] compare_kubectl: statefulset/some-name-rs0-nv OK + compare_kubectl statefulset/some-name-rs0-hidden + local resource=statefulset/some-name-rs0-hidden + local postfix= + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/compare/statefulset_some-name-rs0-hidden.yml + local new_result=/tmp/tmp.6L5peFLawB/statefulset_some-name-rs0-hidden.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/compare/statefulset_some-name-rs0-hidden-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-rs0-hidden + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. 
| select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("non-voting-and-hidden-7583", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.hkkzAiRI7h ++ mktemp + local LAST_ERR=/tmp/tmp.Uow9RnARh5 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/some-name-rs0-hidden + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.hkkzAiRI7h + cat /tmp/tmp.Uow9RnARh5 + rm /tmp/tmp.hkkzAiRI7h /tmp/tmp.Uow9RnARh5 + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.6L5peFLawB/statefulset_some-name-rs0-hidden.yml + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.6L5peFLawB/statefulset_some-name-rs0-hidden.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.6L5peFLawB/statefulset_some-name-rs0-hidden.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/compare/statefulset_some-name-rs0-hidden.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/compare/statefulset_some-name-rs0-hidden.yml /tmp/tmp.6L5peFLawB/statefulset_some-name-rs0-hidden.yml + log 'compare_kubectl: statefulset/some-name-rs0-hidden OK' + set +o xtrace [2025-12-10T13:27:47+0000] compare_kubectl: statefulset/some-name-rs0-hidden OK + log 'check if rs.config is correct' + set +o xtrace [2025-12-10T13:27:47+0000] check if rs.config is correct + compare_rs_config 'rs.config().members.filter(m => m.tags.nonVoting == "true").map(m => m.votes)' clusterAdmin:clusterAdmin123456@some-name-rs0.non-voting-and-hidden-7583 nonvoting + local 'command=rs.config().members.filter(m => m.tags.nonVoting == "true").map(m => m.votes)' + local uri=clusterAdmin:clusterAdmin123456@some-name-rs0.non-voting-and-hidden-7583 + local compare_file=nonvoting + run_mongo 'rs.config().members.filter(m => m.tags.nonVoting == "true").map(m => m.votes)' clusterAdmin:clusterAdmin123456@some-name-rs0.non-voting-and-hidden-7583 mongodb '' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local 
'command=rs.config().members.filter(m => m.tags.nonVoting == "true").map(m => m.votes)' + local uri=clusterAdmin:clusterAdmin123456@some-name-rs0.non-voting-and-hidden-7583 + local driver=mongodb + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.ea6R2vmoBk +++ mktemp ++ local LAST_ERR=/tmp/tmp.nztEJQirom ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ea6R2vmoBk ++ cat /tmp/tmp.nztEJQirom ++ rm /tmp/tmp.ea6R2vmoBk /tmp/tmp.nztEJQirom ++ return 0 + local client_container=psmdb-client-696897d69b-t229s + local mongo_flag= + [[ clusterAdmin:clusterAdmin123456@some-name-rs0.non-voting-and-hidden-7583 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''rs.config().members.filter(m => m.tags.nonVoting == "true").map(m => m.votes)\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.9pvlMBYmqT ++ mktemp + local LAST_ERR=/tmp/tmp.P2v9wdkqLE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''rs.config().members.filter(m => m.tags.nonVoting == "true").map(m => m.votes)\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9pvlMBYmqT + cat /tmp/tmp.P2v9wdkqLE + rm /tmp/tmp.9pvlMBYmqT /tmp/tmp.P2v9wdkqLE + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/compare/nonvoting.json /tmp/tmp.6L5peFLawB/nonvoting + log 'nonVoting members: OK' + set +o xtrace [2025-12-10T13:27:49+0000] nonVoting members: OK + compare_rs_config 'rs.config().members.map(m => m.hidden)' clusterAdmin:clusterAdmin123456@some-name-rs0.non-voting-and-hidden-7583 hidden + local 'command=rs.config().members.map(m => m.hidden)' + local uri=clusterAdmin:clusterAdmin123456@some-name-rs0.non-voting-and-hidden-7583 + local compare_file=hidden + run_mongo 'rs.config().members.map(m => m.hidden)' clusterAdmin:clusterAdmin123456@some-name-rs0.non-voting-and-hidden-7583 mongodb '' + local 'command=rs.config().members.map(m => m.hidden)' + local uri=clusterAdmin:clusterAdmin123456@some-name-rs0.non-voting-and-hidden-7583 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9qcyS5zjuU egrep: warning: egrep is obsolescent; using grep -E +++ mktemp ++ local LAST_ERR=/tmp/tmp.AhfzJPNo2t ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for 
i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.9qcyS5zjuU ++ cat /tmp/tmp.AhfzJPNo2t ++ rm /tmp/tmp.9qcyS5zjuU /tmp/tmp.AhfzJPNo2t ++ return 0 + local client_container=psmdb-client-696897d69b-t229s + local mongo_flag= + [[ clusterAdmin:clusterAdmin123456@some-name-rs0.non-voting-and-hidden-7583 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''rs.config().members.map(m => m.hidden)\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.3ScwQxuFN1 ++ mktemp + local LAST_ERR=/tmp/tmp.TpIjMoZ2Os + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''rs.config().members.map(m => m.hidden)\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.3ScwQxuFN1 + cat /tmp/tmp.TpIjMoZ2Os + rm /tmp/tmp.3ScwQxuFN1 /tmp/tmp.TpIjMoZ2Os + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/compare/hidden.json /tmp/tmp.6L5peFLawB/hidden + log 'hidden members: OK' + set +o xtrace [2025-12-10T13:27:51+0000] hidden members: OK + log 'create user myApp' + set +o xtrace [2025-12-10T13:27:51+0000] create user myApp + run_mongo 'db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})' userAdmin:userAdmin123456@some-name-rs0.non-voting-and-hidden-7583 + local 'command=db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})' + local uri=userAdmin:userAdmin123456@some-name-rs0.non-voting-and-hidden-7583 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5OTKu9c4de +++ mktemp ++ local LAST_ERR=/tmp/tmp.fOGmxABHrO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.5OTKu9c4de ++ cat /tmp/tmp.fOGmxABHrO ++ rm /tmp/tmp.5OTKu9c4de /tmp/tmp.fOGmxABHrO ++ return 0 + local client_container=psmdb-client-696897d69b-t229s + local mongo_flag= + [[ userAdmin:userAdmin123456@some-name-rs0.non-voting-and-hidden-7583 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.5yM3FrNfxo ++ mktemp + local LAST_ERR=/tmp/tmp.6UXQWgW0gQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})\n'\'' | mongo 
mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.5yM3FrNfxo Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-nv-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local:27017,some-name-rs0-hidden-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local:27017,some-name-rs0-hidden-1.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("129b6b42-35ce-4ea5-824d-750dc35614cb") } Percona Server for MongoDB server version: v8.0.16-5 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] } bye + cat /tmp/tmp.6UXQWgW0gQ + rm /tmp/tmp.5yM3FrNfxo /tmp/tmp.6UXQWgW0gQ + return 0 + log 'write data' + set +o xtrace [2025-12-10T13:27:54+0000] write data + run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@some-name-rs0.non-voting-and-hidden-7583 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@some-name-rs0.non-voting-and-hidden-7583 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ORGaw6tSbY +++ mktemp ++ local LAST_ERR=/tmp/tmp.9KZeKJcAD2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ORGaw6tSbY ++ cat /tmp/tmp.9KZeKJcAD2 ++ rm /tmp/tmp.ORGaw6tSbY /tmp/tmp.9KZeKJcAD2 ++ return 0 + local client_container=psmdb-client-696897d69b-t229s + local mongo_flag= + [[ myApp:myPass@some-name-rs0.non-voting-and-hidden-7583 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.G7utTujYLX ++ mktemp + local LAST_ERR=/tmp/tmp.RkqxP7V5tp + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.G7utTujYLX Percona Server for MongoDB shell version v4.4.29-28 connecting to: 
mongodb://some-name-rs0-nv-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local:27017,some-name-rs0-hidden-1.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local:27017,some-name-rs0-hidden-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("97ab0382-bf56-4d06-aadc-03de7a856938") } Percona Server for MongoDB server version: v8.0.16-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.RkqxP7V5tp + rm /tmp/tmp.G7utTujYLX /tmp/tmp.RkqxP7V5tp + return 0 + sleep 10 + log 'compare data' + set +o xtrace [2025-12-10T13:28:06+0000] compare data + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.non-voting-and-hidden-7583 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.non-voting-and-hidden-7583 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:28:06+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.non-voting-and-hidden-7583 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.non-voting-and-hidden-7583 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Wkb9NZKaSk egrep: warning: egrep is obsolescent; using grep -E +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZnaRxQGRAF ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Wkb9NZKaSk ++ cat /tmp/tmp.ZnaRxQGRAF ++ rm /tmp/tmp.Wkb9NZKaSk /tmp/tmp.ZnaRxQGRAF ++ return 0 + local client_container=psmdb-client-696897d69b-t229s + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.non-voting-and-hidden-7583 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.LnsuBRUU2W ++ mktemp + local LAST_ERR=/tmp/tmp.njPWH6lg2G + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo 
mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LnsuBRUU2W + cat /tmp/tmp.njPWH6lg2G + rm /tmp/tmp.LnsuBRUU2W /tmp/tmp.njPWH6lg2G + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/compare/find.json /tmp/tmp.6L5peFLawB/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-nv-0.some-name-rs0.non-voting-and-hidden-7583 + local command=find + local uri=myApp:myPass@some-name-rs0-nv-0.some-name-rs0.non-voting-and-hidden-7583 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:28:09+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-nv-0.some-name-rs0.non-voting-and-hidden-7583 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-nv-0.some-name-rs0.non-voting-and-hidden-7583 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' egrep: warning: egrep is obsolescent; using grep -E +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ji6TUBtjFS +++ mktemp ++ local LAST_ERR=/tmp/tmp.SKpoDWjq0E ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Ji6TUBtjFS ++ cat /tmp/tmp.SKpoDWjq0E ++ rm /tmp/tmp.Ji6TUBtjFS /tmp/tmp.SKpoDWjq0E ++ return 0 + local client_container=psmdb-client-696897d69b-t229s + local mongo_flag= + [[ myApp:myPass@some-name-rs0-nv-0.some-name-rs0.non-voting-and-hidden-7583 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-nv-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.uWiYqMYHR0 ++ mktemp + local LAST_ERR=/tmp/tmp.55EMl7XVAh + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-nv-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.uWiYqMYHR0 + cat /tmp/tmp.55EMl7XVAh + rm /tmp/tmp.uWiYqMYHR0 /tmp/tmp.55EMl7XVAh + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/compare/find.json /tmp/tmp.6L5peFLawB/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-hidden-0.some-name-rs0.non-voting-and-hidden-7583 + local command=find + local 
uri=myApp:myPass@some-name-rs0-hidden-0.some-name-rs0.non-voting-and-hidden-7583 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:28:11+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-hidden-0.some-name-rs0.non-voting-and-hidden-7583 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-hidden-0.some-name-rs0.non-voting-and-hidden-7583 + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local driver=mongodb + local suffix=.svc.cluster.local + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.UJICTUl0ho +++ mktemp ++ local LAST_ERR=/tmp/tmp.4b71HckYsy ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.UJICTUl0ho ++ cat /tmp/tmp.4b71HckYsy ++ rm /tmp/tmp.UJICTUl0ho /tmp/tmp.4b71HckYsy ++ return 0 + local client_container=psmdb-client-696897d69b-t229s + local mongo_flag= + [[ myApp:myPass@some-name-rs0-hidden-0.some-name-rs0.non-voting-and-hidden-7583 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-hidden-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.kkeedTn9ph ++ mktemp + local LAST_ERR=/tmp/tmp.MMK6xzfSft + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-hidden-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.kkeedTn9ph + cat /tmp/tmp.MMK6xzfSft + rm /tmp/tmp.kkeedTn9ph /tmp/tmp.MMK6xzfSft + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/compare/find.json /tmp/tmp.6L5peFLawB/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-hidden-1.some-name-rs0.non-voting-and-hidden-7583 + local command=find + local uri=myApp:myPass@some-name-rs0-hidden-1.some-name-rs0.non-voting-and-hidden-7583 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:28:13+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-hidden-1.some-name-rs0.non-voting-and-hidden-7583 mongodb '' + local 'command=use myApp\n db.test.find()' 
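-----------------------------------------------------------------------------------
how the per-member read check works
-----------------------------------------------------------------------------------
Every compare_mongo_cmd call in this trace runs the same pipeline: pick the psmdb-client pod, exec a db.test.find() through the mongo shell against one member's direct URI, strip shell noise with grep -E, mask ObjectIds and pod ordinals with sed, and diff the result against compare/find.json. The point is that the non-voting and hidden members must serve the same documents as the voting ones. A condensed sketch of that loop follows; the NAMESPACE variable, the relative compare/find.json path, and the shortened filter list are readability simplifications, not the test's exact helpers.

# Condensed per-member read check (sketch; NAMESPACE and relative paths assumed).
client=$(kubectl get pods --selector=name=psmdb-client \
    -o 'jsonpath={.items[].metadata.name}')
for member in rs0-0 rs0-nv-0 rs0-hidden-0 rs0-hidden-1; do
    uri="mongodb://myApp:myPass@some-name-${member}.some-name-rs0.${NAMESPACE}.svc.cluster.local/admin?ssl=false&replicaSet=rs0"
    kubectl exec "$client" -- bash -c "printf 'use myApp\n db.test.find()\n' | mongo '$uri'" \
        | grep -E -v 'I NETWORK|connecting to:|Implicit session:|versions do not match' \
        | sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' \
        > /tmp/find
    diff -u compare/find.json /tmp/find
done
-----------------------------------------------------------------------------------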
+ egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local uri=myApp:myPass@some-name-rs0-hidden-1.some-name-rs0.non-voting-and-hidden-7583 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.AKirVezAkt +++ mktemp ++ local LAST_ERR=/tmp/tmp.mKseISK6dh ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.AKirVezAkt ++ cat /tmp/tmp.mKseISK6dh ++ rm /tmp/tmp.AKirVezAkt /tmp/tmp.mKseISK6dh ++ return 0 + local client_container=psmdb-client-696897d69b-t229s + local mongo_flag= + [[ myApp:myPass@some-name-rs0-hidden-1.some-name-rs0.non-voting-and-hidden-7583 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-hidden-1.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.5o0rFLG0Mu ++ mktemp + local LAST_ERR=/tmp/tmp.3cInrIh0Hb + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-hidden-1.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.5o0rFLG0Mu + cat /tmp/tmp.3cInrIh0Hb + rm /tmp/tmp.5o0rFLG0Mu /tmp/tmp.3cInrIh0Hb + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/compare/find.json /tmp/tmp.6L5peFLawB/find + run_backup minio backup-logical logical + local storage=minio + local backup_name=backup-logical + local type=logical + desc 'run backup backup-logical' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-logical ----------------------------------------------------------------------------------- + yq eval '.metadata.name = "backup-logical" | .spec.storageName = "minio" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/conf/backup-minio.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.CxsXZUyBKA ++ mktemp + local LAST_ERR=/tmp/tmp.vSCvg4sTmE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.CxsXZUyBKA perconaservermongodbbackup.psmdb.percona.com/backup-logical created + cat /tmp/tmp.vSCvg4sTmE + rm /tmp/tmp.CxsXZUyBKA /tmp/tmp.vSCvg4sTmE + return 0 + wait_backup backup-logical ready + local backup_name=backup-logical + local target_state=ready + set +o xtrace waiting for backup-logical to reach ready state.......OK + log 'drop collection' + set +o xtrace [2025-12-10T13:28:29+0000] drop collection + run_mongo 'use 
myApp\n db.test.drop()' myApp:myPass@some-name-rs0.non-voting-and-hidden-7583 + local 'command=use myApp\n db.test.drop()' + local uri=myApp:myPass@some-name-rs0.non-voting-and-hidden-7583 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LVyEnGqH1T +++ mktemp ++ local LAST_ERR=/tmp/tmp.m3nHMhmbLa ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LVyEnGqH1T ++ cat /tmp/tmp.m3nHMhmbLa ++ rm /tmp/tmp.LVyEnGqH1T /tmp/tmp.m3nHMhmbLa ++ return 0 + local client_container=psmdb-client-696897d69b-t229s + local mongo_flag= + [[ myApp:myPass@some-name-rs0.non-voting-and-hidden-7583 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.CSiFUXDcK0 ++ mktemp + local LAST_ERR=/tmp/tmp.z5BvdeYuOw + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.CSiFUXDcK0 Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-hidden-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local:27017,some-name-rs0-hidden-1.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local:27017,some-name-rs0-nv-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("844a3f4e-409a-4969-97c7-cf71018ad81f") } Percona Server for MongoDB server version: v8.0.16-5 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.z5BvdeYuOw + rm /tmp/tmp.CSiFUXDcK0 /tmp/tmp.z5BvdeYuOw + return 0 + run_restore backup-logical + local backup_name=backup-logical + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-logical/' + /usr/sbin/sed -e 's/backupName:/backupName: backup-logical/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.0R4h7qtLau ++ mktemp + local LAST_ERR=/tmp/tmp.eisRvWrGdl + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.0R4h7qtLau perconaservermongodbrestore.psmdb.percona.com/restore-backup-logical created + cat /tmp/tmp.eisRvWrGdl + rm /tmp/tmp.0R4h7qtLau /tmp/tmp.eisRvWrGdl + return 0 + wait_restore backup-logical some-name + local backup_name=backup-logical + local cluster_name=some-name + local target_state=ready + local 
wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-logical object to be created.OK Waiting psmdb-restore/restore-backup-logical to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TZtsyNYEEZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.5gB8mXH0vW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.TZtsyNYEEZ ++ cat /tmp/tmp.5gB8mXH0vW ++ rm /tmp/tmp.TZtsyNYEEZ /tmp/tmp.5gB8mXH0vW ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + log 'compare data after restore-backup-logical' + set +o xtrace [2025-12-10T13:28:53+0000] compare data after restore-backup-logical + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.non-voting-and-hidden-7583 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.non-voting-and-hidden-7583 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:28:53+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.non-voting-and-hidden-7583 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.non-voting-and-hidden-7583 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.PXZl3G4vDc +++ mktemp ++ local LAST_ERR=/tmp/tmp.58lNFTOZlC ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.PXZl3G4vDc ++ cat /tmp/tmp.58lNFTOZlC ++ rm /tmp/tmp.PXZl3G4vDc /tmp/tmp.58lNFTOZlC ++ return 0 + local client_container=psmdb-client-696897d69b-t229s + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.non-voting-and-hidden-7583 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.UFdjPoIgcz ++ mktemp + local LAST_ERR=/tmp/tmp.R6rj568f9P + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n 
db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.UFdjPoIgcz + cat /tmp/tmp.R6rj568f9P + rm /tmp/tmp.UFdjPoIgcz /tmp/tmp.R6rj568f9P + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/compare/find.json /tmp/tmp.6L5peFLawB/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-nv-0.some-name-rs0.non-voting-and-hidden-7583 + local command=find + local uri=myApp:myPass@some-name-rs0-nv-0.some-name-rs0.non-voting-and-hidden-7583 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:28:55+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-nv-0.some-name-rs0.non-voting-and-hidden-7583 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-nv-0.some-name-rs0.non-voting-and-hidden-7583 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' egrep: warning: egrep is obsolescent; using grep -E ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.31SeMCl6Pw +++ mktemp ++ local LAST_ERR=/tmp/tmp.Hx0O5goSGX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.31SeMCl6Pw ++ cat /tmp/tmp.Hx0O5goSGX ++ rm /tmp/tmp.31SeMCl6Pw /tmp/tmp.Hx0O5goSGX ++ return 0 + local client_container=psmdb-client-696897d69b-t229s + local mongo_flag= + [[ myApp:myPass@some-name-rs0-nv-0.some-name-rs0.non-voting-and-hidden-7583 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-nv-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.rVrBG3SID5 ++ mktemp + local LAST_ERR=/tmp/tmp.zSnckJBiL9 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-nv-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.rVrBG3SID5 + cat /tmp/tmp.zSnckJBiL9 + rm /tmp/tmp.rVrBG3SID5 /tmp/tmp.zSnckJBiL9 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/compare/find.json /tmp/tmp.6L5peFLawB/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-hidden-0.some-name-rs0.non-voting-and-hidden-7583 + local command=find + local 
uri=myApp:myPass@some-name-rs0-hidden-0.some-name-rs0.non-voting-and-hidden-7583 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:28:58+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-hidden-0.some-name-rs0.non-voting-and-hidden-7583 mongodb '' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + local 'command=use myApp\n db.test.find()' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local uri=myApp:myPass@some-name-rs0-hidden-0.some-name-rs0.non-voting-and-hidden-7583 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.3jqy1WZ5tJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZBiEEyddEH ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3jqy1WZ5tJ ++ cat /tmp/tmp.ZBiEEyddEH ++ rm /tmp/tmp.3jqy1WZ5tJ /tmp/tmp.ZBiEEyddEH ++ return 0 + local client_container=psmdb-client-696897d69b-t229s + local mongo_flag= + [[ myApp:myPass@some-name-rs0-hidden-0.some-name-rs0.non-voting-and-hidden-7583 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-hidden-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.7ebBjsv2LG ++ mktemp + local LAST_ERR=/tmp/tmp.85H37bLYwe + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-hidden-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.7ebBjsv2LG + cat /tmp/tmp.85H37bLYwe + rm /tmp/tmp.7ebBjsv2LG /tmp/tmp.85H37bLYwe + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/compare/find.json /tmp/tmp.6L5peFLawB/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-hidden-1.some-name-rs0.non-voting-and-hidden-7583 + local command=find + local uri=myApp:myPass@some-name-rs0-hidden-1.some-name-rs0.non-voting-and-hidden-7583 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:29:01+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-hidden-1.some-name-rs0.non-voting-and-hidden-7583 mongodb '' + local 'command=use myApp\n db.test.find()' 
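-----------------------------------------------------------------------------------
how the backup and restore objects are built
-----------------------------------------------------------------------------------
The logical cycle above is three moves: apply a perconaservermongodbbackup built from conf/backup-minio.yml, drop myApp.test, then apply a perconaservermongodbrestore built from conf/restore.yml and repeat the member-by-member reads. The yq and sed invocations below are the ones visible in the trace, with the Jenkins workspace paths shortened to relative ones:

# Create the backup CR (paths shortened; commands as seen in the trace).
yq eval '.metadata.name = "backup-logical" |
         .spec.storageName = "minio" |
         .spec.type = "logical"' conf/backup-minio.yml | kubectl apply -f -

# After dropping the collection, create the matching restore CR.
sed -e 's/name:/name: restore-backup-logical/' \
    -e 's/backupName:/backupName: backup-logical/' conf/restore.yml \
    | kubectl apply -f -

wait_backup and wait_restore then poll each object until it reports ready before the data comparison resumes.
-----------------------------------------------------------------------------------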
+ local uri=myApp:myPass@some-name-rs0-hidden-1.some-name-rs0.non-voting-and-hidden-7583 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.lg61xhLf5I +++ mktemp ++ local LAST_ERR=/tmp/tmp.kac09ghaIc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.lg61xhLf5I ++ cat /tmp/tmp.kac09ghaIc ++ rm /tmp/tmp.lg61xhLf5I /tmp/tmp.kac09ghaIc ++ return 0 + local client_container=psmdb-client-696897d69b-t229s + local mongo_flag= + [[ myApp:myPass@some-name-rs0-hidden-1.some-name-rs0.non-voting-and-hidden-7583 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-hidden-1.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.FY2vrgOIGf ++ mktemp + local LAST_ERR=/tmp/tmp.rECOm60SkG + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-hidden-1.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.FY2vrgOIGf + cat /tmp/tmp.rECOm60SkG + rm /tmp/tmp.FY2vrgOIGf /tmp/tmp.rECOm60SkG + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/compare/find.json /tmp/tmp.6L5peFLawB/find + run_backup minio backup-physical physical + local storage=minio + local backup_name=backup-physical + local type=physical + desc 'run backup backup-physical' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-physical ----------------------------------------------------------------------------------- + yq eval '.metadata.name = "backup-physical" | .spec.storageName = "minio" | .spec.type = "physical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/conf/backup-minio.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.oAQb2Sn8et ++ mktemp + local LAST_ERR=/tmp/tmp.mz8kS97gZ6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.oAQb2Sn8et perconaservermongodbbackup.psmdb.percona.com/backup-physical created + cat /tmp/tmp.mz8kS97gZ6 + rm /tmp/tmp.oAQb2Sn8et /tmp/tmp.mz8kS97gZ6 + return 0 + wait_backup backup-physical ready + local backup_name=backup-physical + local target_state=ready + set +o xtrace waiting for backup-physical to reach ready state.......OK + log 'drop collection' + set +o xtrace [2025-12-10T13:29:17+0000] drop collection + 
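-----------------------------------------------------------------------------------
the physical cycle repeats the same steps
-----------------------------------------------------------------------------------
backup-physical was produced from the same template with .spec.type = "physical", and the drop/restore/verify sequence below is otherwise identical to the logical run. Recovery is noticeably slower, though: restore-backup-physical reaches "ready" only after about five minutes, and the cluster state passes through "error" and "initializing" several times before settling, consistent with the members coming back up from the restored data files.
-----------------------------------------------------------------------------------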
run_mongo 'use myApp\n db.test.drop()' myApp:myPass@some-name-rs0.non-voting-and-hidden-7583 + local 'command=use myApp\n db.test.drop()' + local uri=myApp:myPass@some-name-rs0.non-voting-and-hidden-7583 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2gbX5s7HIF +++ mktemp ++ local LAST_ERR=/tmp/tmp.Aygb04RIAh ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.2gbX5s7HIF ++ cat /tmp/tmp.Aygb04RIAh ++ rm /tmp/tmp.2gbX5s7HIF /tmp/tmp.Aygb04RIAh ++ return 0 + local client_container=psmdb-client-696897d69b-t229s + local mongo_flag= + [[ myApp:myPass@some-name-rs0.non-voting-and-hidden-7583 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.44fRQH01Vp ++ mktemp + local LAST_ERR=/tmp/tmp.b2zM0lzJIt + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.44fRQH01Vp Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-hidden-1.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local:27017,some-name-rs0-nv-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local:27017,some-name-rs0-hidden-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("49025d3a-ee0b-45ff-aa94-8f47403eb7b3") } Percona Server for MongoDB server version: v8.0.16-5 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.b2zM0lzJIt + rm /tmp/tmp.44fRQH01Vp /tmp/tmp.b2zM0lzJIt + return 0 + run_restore backup-physical + local backup_name=backup-physical + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-physical/' + /usr/sbin/sed -e 's/backupName:/backupName: backup-physical/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.tH6Vgibtxe ++ mktemp + local LAST_ERR=/tmp/tmp.2A34UX6UXX + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.tH6Vgibtxe perconaservermongodbrestore.psmdb.percona.com/restore-backup-physical created + cat /tmp/tmp.2A34UX6UXX + rm /tmp/tmp.tH6Vgibtxe /tmp/tmp.2A34UX6UXX + return 0 + wait_restore backup-physical some-name + local backup_name=backup-physical + local cluster_name=some-name + local target_state=ready + local 
wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-physical object to be created.OK Waiting psmdb-restore/restore-backup-physical to reach state "ready" ......OK after 5 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1xbd34PMUL +++ mktemp ++ local LAST_ERR=/tmp/tmp.0N9UZIVhPp ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.1xbd34PMUL ++ cat /tmp/tmp.0N9UZIVhPp ++ rm /tmp/tmp.1xbd34PMUL /tmp/tmp.0N9UZIVhPp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.l6hP6PIK11 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ltrlJ1lsRd ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.l6hP6PIK11 ++ cat /tmp/tmp.ltrlJ1lsRd ++ rm /tmp/tmp.l6hP6PIK11 /tmp/tmp.ltrlJ1lsRd ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3CozhdZ0HV +++ mktemp ++ local LAST_ERR=/tmp/tmp.ELl1LxfuVz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3CozhdZ0HV ++ cat /tmp/tmp.ELl1LxfuVz ++ rm /tmp/tmp.3CozhdZ0HV /tmp/tmp.ELl1LxfuVz ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RY810fGTnX +++ mktemp ++ local LAST_ERR=/tmp/tmp.GNPjF7rAtf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.RY810fGTnX ++ cat /tmp/tmp.GNPjF7rAtf ++ rm /tmp/tmp.RY810fGTnX /tmp/tmp.GNPjF7rAtf ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bhEj5c05FZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.KAND5xOtvf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bhEj5c05FZ ++ cat /tmp/tmp.KAND5xOtvf ++ rm /tmp/tmp.bhEj5c05FZ /tmp/tmp.KAND5xOtvf ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . 
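-----------------------------------------------------------------------------------
how the readiness poll tolerates transient errors
-----------------------------------------------------------------------------------
wait_cluster_consistency treats "error" and "initializing" as retryable: it re-reads .status.state up to 32 times with a 10-second pause, as the retry counter in the trace shows. A minimal standalone equivalent, assuming only kubectl access to the psmdb custom resource:

# Poll the PSMDB CR until it reports ready; transient "error" and
# "initializing" states after a physical restore are retried, not fatal.
retry=0
until [[ "$(kubectl get psmdb some-name -o 'jsonpath={.status.state}')" == "ready" ]]; do
    retry=$((retry + 1))
    if [ "$retry" -ge 32 ]; then
        echo "cluster some-name never reached ready" >&2
        exit 1
    fi
    echo -n .
    sleep 10
done
echo .OK
-----------------------------------------------------------------------------------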
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wazm81NVv3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.vijaZ0F1M5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.wazm81NVv3 ++ cat /tmp/tmp.vijaZ0F1M5 ++ rm /tmp/tmp.wazm81NVv3 /tmp/tmp.vijaZ0F1M5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DfcDTRJhIj +++ mktemp ++ local LAST_ERR=/tmp/tmp.DYFj5PwFxQ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.DfcDTRJhIj ++ cat /tmp/tmp.DYFj5PwFxQ ++ rm /tmp/tmp.DfcDTRJhIj /tmp/tmp.DYFj5PwFxQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oUVzbhcrHu +++ mktemp ++ local LAST_ERR=/tmp/tmp.S8wT5xQtKh ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.oUVzbhcrHu ++ cat /tmp/tmp.S8wT5xQtKh ++ rm /tmp/tmp.oUVzbhcrHu /tmp/tmp.S8wT5xQtKh ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 8 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.g10KeJ1qiO +++ mktemp ++ local LAST_ERR=/tmp/tmp.AefokKKsMh ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.g10KeJ1qiO ++ cat /tmp/tmp.AefokKKsMh ++ rm /tmp/tmp.g10KeJ1qiO /tmp/tmp.AefokKKsMh ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 9 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.h7942nxQoP +++ mktemp ++ local LAST_ERR=/tmp/tmp.5SpBGxEqOO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.h7942nxQoP ++ cat /tmp/tmp.5SpBGxEqOO ++ rm /tmp/tmp.h7942nxQoP /tmp/tmp.5SpBGxEqOO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 10 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.x0D8mgu9dD +++ mktemp ++ local LAST_ERR=/tmp/tmp.zumhkLHeV6 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.x0D8mgu9dD ++ cat /tmp/tmp.zumhkLHeV6 ++ rm /tmp/tmp.x0D8mgu9dD /tmp/tmp.zumhkLHeV6 ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + log 'compare data after restore-backup-physical' + set +o xtrace [2025-12-10T13:36:35+0000] compare data after restore-backup-physical + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.non-voting-and-hidden-7583 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.non-voting-and-hidden-7583 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:36:35+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.non-voting-and-hidden-7583 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.non-voting-and-hidden-7583 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.1UJ8rWB86l +++ mktemp ++ local LAST_ERR=/tmp/tmp.NMvINDWzja ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.1UJ8rWB86l ++ cat /tmp/tmp.NMvINDWzja ++ rm /tmp/tmp.1UJ8rWB86l /tmp/tmp.NMvINDWzja ++ return 0 + local client_container=psmdb-client-696897d69b-t229s + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.non-voting-and-hidden-7583 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.fcNw3g4Kkv ++ mktemp + local LAST_ERR=/tmp/tmp.YI0ykLNhmN + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.fcNw3g4Kkv + cat /tmp/tmp.YI0ykLNhmN + rm /tmp/tmp.fcNw3g4Kkv /tmp/tmp.YI0ykLNhmN + return 0 + [[ 0 -eq 0 ]] + diff -u 
/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/compare/find.json /tmp/tmp.6L5peFLawB/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-nv-0.some-name-rs0.non-voting-and-hidden-7583 + local command=find + local uri=myApp:myPass@some-name-rs0-nv-0.some-name-rs0.non-voting-and-hidden-7583 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:36:38+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-nv-0.some-name-rs0.non-voting-and-hidden-7583 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-nv-0.some-name-rs0.non-voting-and-hidden-7583 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' egrep: warning: egrep is obsolescent; using grep -E +++ mktemp ++ local LAST_OUT=/tmp/tmp.pYHRoq2SXM +++ mktemp ++ local LAST_ERR=/tmp/tmp.NxQyD8UAwp ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.pYHRoq2SXM ++ cat /tmp/tmp.NxQyD8UAwp ++ rm /tmp/tmp.pYHRoq2SXM /tmp/tmp.NxQyD8UAwp ++ return 0 + local client_container=psmdb-client-696897d69b-t229s + local mongo_flag= + [[ myApp:myPass@some-name-rs0-nv-0.some-name-rs0.non-voting-and-hidden-7583 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-nv-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.SdrsA6LyZe ++ mktemp + local LAST_ERR=/tmp/tmp.gBiQRDxTfo + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-nv-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.SdrsA6LyZe + cat /tmp/tmp.gBiQRDxTfo + rm /tmp/tmp.SdrsA6LyZe /tmp/tmp.gBiQRDxTfo + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/compare/find.json /tmp/tmp.6L5peFLawB/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-hidden-0.some-name-rs0.non-voting-and-hidden-7583 + local command=find + local uri=myApp:myPass@some-name-rs0-hidden-0.some-name-rs0.non-voting-and-hidden-7583 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:36:41+0000] running db.test.find() in 
myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-hidden-0.some-name-rs0.non-voting-and-hidden-7583 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-hidden-0.some-name-rs0.non-voting-and-hidden-7583 + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local driver=mongodb + local suffix=.svc.cluster.local + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yQLzFn3Z0k egrep: warning: egrep is obsolescent; using grep -E +++ mktemp ++ local LAST_ERR=/tmp/tmp.QqYBC5fdcZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.yQLzFn3Z0k ++ cat /tmp/tmp.QqYBC5fdcZ ++ rm /tmp/tmp.yQLzFn3Z0k /tmp/tmp.QqYBC5fdcZ ++ return 0 + local client_container=psmdb-client-696897d69b-t229s + local mongo_flag= + [[ myApp:myPass@some-name-rs0-hidden-0.some-name-rs0.non-voting-and-hidden-7583 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-hidden-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.cEDuPUrZpZ ++ mktemp + local LAST_ERR=/tmp/tmp.ECvhdvOPYi + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-hidden-0.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.cEDuPUrZpZ + cat /tmp/tmp.ECvhdvOPYi + rm /tmp/tmp.cEDuPUrZpZ /tmp/tmp.ECvhdvOPYi + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/compare/find.json /tmp/tmp.6L5peFLawB/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-hidden-1.some-name-rs0.non-voting-and-hidden-7583 + local command=find + local uri=myApp:myPass@some-name-rs0-hidden-1.some-name-rs0.non-voting-and-hidden-7583 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:36:44+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-hidden-1.some-name-rs0.non-voting-and-hidden-7583 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-hidden-1.some-name-rs0.non-voting-and-hidden-7583 + local driver=mongodb + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the 
timer service' + local suffix=.svc.cluster.local + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.16JSqtdypO +++ mktemp ++ local LAST_ERR=/tmp/tmp.5ikqY1p1FQ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.16JSqtdypO ++ cat /tmp/tmp.5ikqY1p1FQ ++ rm /tmp/tmp.16JSqtdypO /tmp/tmp.5ikqY1p1FQ ++ return 0 + local client_container=psmdb-client-696897d69b-t229s + local mongo_flag= + [[ myApp:myPass@some-name-rs0-hidden-1.some-name-rs0.non-voting-and-hidden-7583 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-hidden-1.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.pXr4lHhGis ++ mktemp + local LAST_ERR=/tmp/tmp.zNFW4XPHH9 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-t229s -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-hidden-1.some-name-rs0.non-voting-and-hidden-7583.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.pXr4lHhGis + cat /tmp/tmp.zNFW4XPHH9 + rm /tmp/tmp.pXr4lHhGis /tmp/tmp.zNFW4XPHH9 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/non-voting-and-hidden/compare/find.json /tmp/tmp.6L5peFLawB/find + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + destroy non-voting-and-hidden-7583 + local namespace=non-voting-and-hidden-7583 + local ignore_logs=true + [[ 0 == 1 ]] + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false ']' + delete_backups + desc 'Delete psmdb-backup' + set +o xtrace ----------------------------------------------------------------------------------- Delete psmdb-backup ----------------------------------------------------------------------------------- ++ kubectl_bin get psmdb-backup --no-headers ++ wc -l +++ mktemp ++ local LAST_OUT=/tmp/tmp.hJYYwEX7Gs +++ mktemp ++ local LAST_ERR=/tmp/tmp.sa4wwmhsqQ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup --no-headers ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hJYYwEX7Gs ++ cat /tmp/tmp.sa4wwmhsqQ ++ rm /tmp/tmp.hJYYwEX7Gs /tmp/tmp.sa4wwmhsqQ ++ return 0 + '[' 2 '!=' 0 ']' + kubectl_bin get psmdb-backup ++ mktemp + local LAST_OUT=/tmp/tmp.ggTXBud9Dt ++ mktemp + local LAST_ERR=/tmp/tmp.mmABYZgvp3 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get psmdb-backup + exit_status=0 + set -e + '[' 0 '!=' 0 
-a -n 1 ']' + break + cat /tmp/tmp.ggTXBud9Dt
NAME              CLUSTER     STORAGE   DESTINATION                                   TYPE       SIZE      STATUS   COMPLETED   AGE
backup-logical    some-name   minio     s3://operator-testing/2025-12-10T13:28:18Z    logical    54.65KB   ready    8m22s       8m30s
backup-physical   some-name   minio     s3://operator-testing/2025-12-10T13:29:05Z    physical   1.01MB    ready    7m33s       7m42s
+ cat /tmp/tmp.mmABYZgvp3 + rm /tmp/tmp.ggTXBud9Dt /tmp/tmp.mmABYZgvp3 + return 0 + kubectl_bin delete psmdb-backup --all ++ mktemp + local LAST_OUT=/tmp/tmp.zkFmDEHsf2 ++ mktemp + local LAST_ERR=/tmp/tmp.pj8EKTza76 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete psmdb-backup --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.zkFmDEHsf2 perconaservermongodbbackup.psmdb.percona.com "backup-logical" deleted from non-voting-and-hidden-7583 namespace perconaservermongodbbackup.psmdb.percona.com "backup-physical" deleted from non-voting-and-hidden-7583 namespace + cat /tmp/tmp.pj8EKTza76 + rm /tmp/tmp.zkFmDEHsf2 /tmp/tmp.pj8EKTza76 + return 0 + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.EozS0vQBxr ++ mktemp + local LAST_ERR=/tmp/tmp.H9AELKBM9d + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.EozS0vQBxr customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.H9AELKBM9d + rm /tmp/tmp.EozS0vQBxr /tmp/tmp.H9AELKBM9d + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/crd.yaml ++ grep -v '\-\-\-' grep: warning: stray \ before - grep: warning: stray \ before - + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.1e5yoLnlx7 ++ mktemp + local LAST_ERR=/tmp/tmp.M2nw8X2ZU6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.1e5yoLnlx7 + cat /tmp/tmp.M2nw8X2ZU6 + rm /tmp/tmp.1e5yoLnlx7 /tmp/tmp.M2nw8X2ZU6 + return 0 + for crd_name in $(yq eval
'.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.2WeHiJdIWF ++ mktemp + local LAST_ERR=/tmp/tmp.4lSDSI0qjf + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.2WeHiJdIWF + cat /tmp/tmp.4lSDSI0qjf + rm /tmp/tmp.2WeHiJdIWF /tmp/tmp.4lSDSI0qjf + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.DanIQmqqWh ++ mktemp + local LAST_ERR=/tmp/tmp.Jld03vdeCp + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DanIQmqqWh + cat /tmp/tmp.Jld03vdeCp + rm /tmp/tmp.DanIQmqqWh /tmp/tmp.Jld03vdeCp + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.07gFkzkOmc ++ mktemp + local LAST_ERR=/tmp/tmp.OprFb2MAGg + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.07gFkzkOmc clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.OprFb2MAGg + rm /tmp/tmp.07gFkzkOmc /tmp/tmp.OprFb2MAGg + return 0 + destroy_cert_manager + kubectl_bin delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.b1PTcH3lEY ++ mktemp + local LAST_ERR=/tmp/tmp.EcPrPy1ZYW + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.b1PTcH3lEY namespace "cert-manager" deleted 
customresourcedefinition.apiextensions.k8s.io "challenges.acme.cert-manager.io" deleted customresourcedefinition.apiextensions.k8s.io "orders.acme.cert-manager.io" deleted customresourcedefinition.apiextensions.k8s.io "certificaterequests.cert-manager.io" deleted customresourcedefinition.apiextensions.k8s.io "certificates.cert-manager.io" deleted customresourcedefinition.apiextensions.k8s.io "clusterissuers.cert-manager.io" deleted customresourcedefinition.apiextensions.k8s.io "issuers.cert-manager.io" deleted serviceaccount "cert-manager-cainjector" deleted from cert-manager namespace serviceaccount "cert-manager" deleted from cert-manager namespace serviceaccount "cert-manager-webhook" deleted from cert-manager namespace clusterrole.rbac.authorization.k8s.io "cert-manager-cainjector" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-controller-issuers" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-controller-certificates" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-controller-orders" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-controller-challenges" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-cluster-view" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-view" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-edit" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-cainjector" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-issuers" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-certificates" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-orders" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-challenges" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" deleted role.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" deleted from kube-system namespace role.rbac.authorization.k8s.io "cert-manager:leaderelection" deleted from kube-system namespace role.rbac.authorization.k8s.io "cert-manager-tokenrequest" deleted from cert-manager namespace role.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" deleted from cert-manager namespace rolebinding.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" deleted from kube-system namespace rolebinding.rbac.authorization.k8s.io "cert-manager:leaderelection" deleted from kube-system namespace deployment.apps "cert-manager-cainjector" deleted from cert-manager namespace deployment.apps "cert-manager" deleted from cert-manager namespace deployment.apps "cert-manager-webhook" deleted from cert-manager 
namespace mutatingwebhookconfiguration.admissionregistration.k8s.io "cert-manager-webhook" deleted validatingwebhookconfiguration.admissionregistration.k8s.io "cert-manager-webhook" deleted + cat /tmp/tmp.EcPrPy1ZYW Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-webhook" not found + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.b1PTcH3lEY namespace "cert-manager" deleted + cat /tmp/tmp.EcPrPy1ZYW Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io 
"cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when 
deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 4 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.b1PTcH3lEY + cat /tmp/tmp.EcPrPy1ZYW Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not 
found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found 
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting 
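These retries are futile by construction: the first kubectl delete already removed everything it could, so re-applying the same manifest fails with NotFound for objects that are gone, every attempt exits 1, and after the third pass the wrapper gives up with return 1 (which the caller tolerates via the + true just below). Purely as a hypothetical hardening, not something the suite does for this call, the same --ignore-not-found flag already used for the CRD cleanup above would make this teardown idempotent:

    kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml \
        --ignore-not-found=true --wait=false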
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 8 + cat /tmp/tmp.b1PTcH3lEY + cat /tmp/tmp.EcPrPy1ZYW Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": 
customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io 
"cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server 
+ '[' -n '' ']' + '[' -n psmdb-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace non-voting-and-hidden-7583 + rm -rf /tmp/tmp.6L5peFLawB ++ mktemp + kubectl_bin delete --grace-period=0 --force=true namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.4qiOyEfiLv ++ mktemp + local LAST_OUT=/tmp/tmp.rVDypxrH0q ++ mktemp + local LAST_ERR=/tmp/tmp.hT6bBRjVvT + local exit_status=0 + local timeout=4 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.MCG4nZbH9m + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete --grace-period=0 --force=true namespace non-voting-and-hidden-7583 + for i in $(seq 0 2) + set +e + kubectl delete --grace-period=0 --force=true namespace psmdb-operator
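Two recurring warts in this teardown are worth decoding. The grep: warning: stray \ before - messages come from grep -v '\-\-\-': inside single quotes \- is a literal backslash before a dash, which recent GNU grep flags as a stray escape; grep -v -- '---' (or grep -v '^---$') expresses the same document-separator filter without the warning. And the finalizer-clearing idiom that delete_crd runs before waiting on each CRD reduces to the sketch below (the resource names, the $0/$1 namespace/name convention, and the merge patch are taken from the trace; the || : mirrors the + : no-op seen when a resource type is already gone):

    # for each CRD in deploy/crd.yaml, empty the finalizers of any leftover
    # custom resources so CRD deletion cannot hang, then wait for the CRD to go
    for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v -- '---'); do
        kubectl get "$crd_name" --all-namespaces -o wide \
            | grep -v NAMESPACE \
            | xargs -L 1 sh -xc "kubectl patch $crd_name -n \$0 \$1 --type=merge -p '{\"metadata\":{\"finalizers\":[]}}'" \
            || :
        kubectl wait --for=delete crd "$crd_name"
    done

This also explains the odd kubectl patch ... -n sh errors in the trace: when the CRD is already gone, the pipeline feeds xargs nothing, but GNU xargs still runs the command once with no arguments, so $0 falls back to the shell's default name "sh"; xargs --no-run-if-empty would avoid that harmless extra invocation.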