Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/e2e-tests/logs/data-sharded.log WARNING: version difference between client (1.33) and server (1.30) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.33) and server (1.30) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.33) and server (1.30) exceeds the supported minor version skew of +/-1 + main + [[ perconalab/percona-server-mongodb-operator:main-mongod7.0 == *\p\e\r\c\o\n\a\-\s\e\r\v\e\r\-\m\o\n\g\o\d\b\-\o\p\e\r\a\t\o\r* ]] ++ echo -n perconalab/percona-server-mongodb-operator:main-mongod7.0 ++ /usr/bin/sed -r 's/.*([0-9].[0-9])$/\1/' + MONGO_VER=7.0 + create_infra data-sharded-22380 + local ns=data-sharded-22380 + [[ 1 == 1 ]] + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.JPq8CSyPIR ++ mktemp + local LAST_ERR=/tmp/tmp.4rVP9b55GE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.JPq8CSyPIR customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.4rVP9b55GE + rm /tmp/tmp.JPq8CSyPIR /tmp/tmp.4rVP9b55GE + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/deploy/crd.yaml ++ grep -v '\-\-\-' + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.90Iz1q8xPs ++ mktemp + local LAST_ERR=/tmp/tmp.B7c5pW3gqg + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.90Iz1q8xPs + cat /tmp/tmp.B7c5pW3gqg + rm /tmp/tmp.90Iz1q8xPs /tmp/tmp.B7c5pW3gqg + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p 
"{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.6BaNVuD5f6 ++ mktemp + local LAST_ERR=/tmp/tmp.ufqmuU4CIz + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6BaNVuD5f6 + cat /tmp/tmp.ufqmuU4CIz + rm /tmp/tmp.6BaNVuD5f6 /tmp/tmp.ufqmuU4CIz + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.0TFF8Sttcb ++ mktemp + local LAST_ERR=/tmp/tmp.mD6vwqoYXx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.0TFF8Sttcb + cat /tmp/tmp.mD6vwqoYXx + rm /tmp/tmp.0TFF8Sttcb /tmp/tmp.mD6vwqoYXx + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.PunBpZhiAK ++ mktemp + local LAST_ERR=/tmp/tmp.qZfDnhi8Wa + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.PunBpZhiAK clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.qZfDnhi8Wa + rm /tmp/tmp.PunBpZhiAK /tmp/tmp.qZfDnhi8Wa + return 0 + check_crd_for_deletion PR-1915-2d829121 + local git_tag=PR-1915-2d829121 ++ curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/PR-1915-2d829121/deploy/crd.yaml ++ /usr/bin/sed ':a;N;$!ba;s/\n/ /g' ++ yq eval .metadata.name ++ /usr/bin/sed s/---//g + for crd_name in '$(curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml | yq eval '\''.metadata.name'\'' | $sed '\''s/---//g'\'' | $sed '\'':a;N;$!ba;s/\n/ /g'\'')' ++ kubectl_bin get crd/null -o 'jsonpath={.status.conditions[-1].type}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Si8nVWQ3Ui +++ mktemp ++ local LAST_ERR=/tmp/tmp.WmXz9qk0al ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 
'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.Si8nVWQ3Ui ++ cat /tmp/tmp.WmXz9qk0al Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 0 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.Si8nVWQ3Ui ++ cat /tmp/tmp.WmXz9qk0al Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 4 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.Si8nVWQ3Ui ++ cat /tmp/tmp.WmXz9qk0al Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 8 ++ cat /tmp/tmp.Si8nVWQ3Ui ++ cat /tmp/tmp.WmXz9qk0al Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ rm /tmp/tmp.Si8nVWQ3Ui /tmp/tmp.WmXz9qk0al ++ return 1 + [[ '' == \T\e\r\m\i\n\a\t\i\n\g ]] + '[' -n psmdb-operator ']' + create_namespace psmdb-operator + local namespace=psmdb-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl get crd ++ awk '{print $1}' ++ grep chaos-mesh.org + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin delete 
namespace psmdb-operator --ignore-not-found + kubectl_bin get ns + xargs kubectl delete ns ++ mktemp + local LAST_OUT=/tmp/tmp.gVawRch5re ++ mktemp + local LAST_ERR=/tmp/tmp.gcJs3Nbj4j + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns ++ mktemp + local LAST_OUT=/tmp/tmp.xUo2CEsRB4 ++ mktemp + local LAST_ERR=/tmp/tmp.TkxNKKLDsh + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace psmdb-operator --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.gVawRch5re + cat /tmp/tmp.gcJs3Nbj4j + rm /tmp/tmp.gVawRch5re /tmp/tmp.gcJs3Nbj4j + return 0 namespace "cert-manager" deleted namespace "data-sharded-23056" deleted namespace "gke-managed-cim" deleted namespace "gke-managed-system" deleted namespace "gmp-public" deleted namespace "gmp-system" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.xUo2CEsRB4 namespace "psmdb-operator" deleted + cat /tmp/tmp.TkxNKKLDsh + rm /tmp/tmp.xUo2CEsRB4 /tmp/tmp.TkxNKKLDsh + return 0 + kubectl_bin wait --for=delete namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.VrjMposLBC ++ mktemp + local LAST_ERR=/tmp/tmp.TGUr7iLc53 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.VrjMposLBC + cat /tmp/tmp.TGUr7iLc53 + rm /tmp/tmp.VrjMposLBC /tmp/tmp.TGUr7iLc53 + return 0 + desc 'create namespace psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.BcWMm86EkU ++ mktemp + local LAST_ERR=/tmp/tmp.WDCOclaIqy + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.BcWMm86EkU namespace/psmdb-operator created + cat /tmp/tmp.WDCOclaIqy + rm /tmp/tmp.BcWMm86EkU /tmp/tmp.WDCOclaIqy + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.0WVCgmW31h +++ mktemp ++ local LAST_ERR=/tmp/tmp.DYBHvjmDTo ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.0WVCgmW31h ++ cat /tmp/tmp.DYBHvjmDTo ++ rm /tmp/tmp.0WVCgmW31h /tmp/tmp.DYBHvjmDTo ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1915-2d829121-4-cluster9 --namespace=psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.qXGxnwBrFT ++ mktemp + local LAST_ERR=/tmp/tmp.cg2wl3PZfY + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1915-2d829121-4-cluster9 --namespace=psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.qXGxnwBrFT Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1915-2d829121-4-cluster9" modified. 
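Note: every kubectl invocation in this trace goes through the suite's kubectl_bin wrapper, whose behaviour can be read directly from the + lines above (stdout/stderr captured into mktemp files, up to three attempts, a 0/4/8-second pause between failed attempts, output echoed before the temp files are removed). A minimal sketch of that pattern, reconstructed from this log alone; the real helper in the e2e test functions file may differ in details:

    kubectl_bin() {
        # capture stdout/stderr of each attempt, retry up to three times
        local LAST_OUT LAST_ERR exit_status=0 timeout=4
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            [ "$exit_status" -eq 0 ] && break
            sleep $((i * timeout))   # 0s, 4s, 8s between attempts, as seen above
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2
        rm -f "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }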
+ cat /tmp/tmp.cg2wl3PZfY + rm /tmp/tmp.qXGxnwBrFT /tmp/tmp.cg2wl3PZfY + return 0 + deploy_operator + desc 'start PSMDB operator' + set +o xtrace ----------------------------------------------------------------------------------- start PSMDB operator ----------------------------------------------------------------------------------- + local cr_file + '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/e2e-tests/data-sharded/conf/crd.yaml ']' + cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/deploy/crd.yaml + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.FaEo6yasuz ++ mktemp + local LAST_ERR=/tmp/tmp.QU3oMKY1ih + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.FaEo6yasuz customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied + cat /tmp/tmp.QU3oMKY1ih + rm /tmp/tmp.FaEo6yasuz /tmp/tmp.QU3oMKY1ih + return 0 + '[' -n psmdb-operator ']' + apply_rbac cw-rbac + local operator_namespace=psmdb-operator + local rbac=cw-rbac + sed -e 's^namespace: .*^namespace: psmdb-operator^' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/deploy/cw-rbac.yaml + kubectl_bin apply -n psmdb-operator -f - ++ mktemp + local LAST_OUT=/tmp/tmp.8RhxU4taX8 ++ mktemp + local LAST_ERR=/tmp/tmp.hRVFlDedMz + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.8RhxU4taX8 clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created + cat /tmp/tmp.hRVFlDedMz + rm /tmp/tmp.8RhxU4taX8 /tmp/tmp.hRVFlDedMz + return 0 + yq eval ' (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-1915-2d829121") | ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") | ((.. 
| select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/deploy/cw-operator.yaml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.tVH0W46uFg ++ mktemp + local LAST_ERR=/tmp/tmp.UE3jhW9jSY + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.tVH0W46uFg deployment.apps/percona-server-mongodb-operator created + cat /tmp/tmp.UE3jhW9jSY + rm /tmp/tmp.tVH0W46uFg /tmp/tmp.UE3jhW9jSY + return 0 + sleep 2 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.mUv6ApIoP4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.saWq2bKz4e ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.mUv6ApIoP4 ++ cat /tmp/tmp.saWq2bKz4e ++ rm /tmp/tmp.mUv6ApIoP4 /tmp/tmp.saWq2bKz4e ++ return 0 + wait_pod percona-server-mongodb-operator-6c96ccdc79-8hvpb + local pod=percona-server-mongodb-operator-6c96ccdc79-8hvpb + set +o xtrace waiting for pod/percona-server-mongodb-operator-6c96ccdc79-8hvpb to be ready.OK + echo 'Print operator info from log' Print operator info from log + grep 'Manager starting up' ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.jUoJuIRNdE +++ mktemp ++ local LAST_ERR=/tmp/tmp.N5Rta3zAR5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.jUoJuIRNdE ++ cat /tmp/tmp.N5Rta3zAR5 ++ rm /tmp/tmp.jUoJuIRNdE /tmp/tmp.N5Rta3zAR5 ++ return 0 + kubectl_bin logs percona-server-mongodb-operator-6c96ccdc79-8hvpb ++ mktemp + local LAST_OUT=/tmp/tmp.tN6G9u1qYX ++ mktemp + local LAST_ERR=/tmp/tmp.BHw6gOgKJ8 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs percona-server-mongodb-operator-6c96ccdc79-8hvpb + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.tN6G9u1qYX + cat /tmp/tmp.BHw6gOgKJ8 + rm /tmp/tmp.tN6G9u1qYX /tmp/tmp.BHw6gOgKJ8 + return 0 2025-05-21T09:49:45.490Z INFO setup Manager starting up {"gitCommit": "2d8291219bbb1a7ad015786c5ebd76d37f8fba63", "gitBranch": "PR-1915-2d829121", "buildTime": "", "goVersion": "go1.24.3", "os": "linux", "arch": "amd64"} + create_namespace data-sharded-22380 + local namespace=data-sharded-22380 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ sed s/NAMESPACE// ++ awk '-F ' '{print $2}' + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: 
resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + awk '{print$1}' ++ mktemp + '[' -n '' ']' + desc 'cleaned up old namespaces data-sharded-22380' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces data-sharded-22380+ xargs kubectl delete ns ----------------------------------------------------------------------------------- + kubectl_bin delete namespace data-sharded-22380 --ignore-not-found + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' ++ mktemp + local LAST_OUT=/tmp/tmp.2CAjMxCDnu + local LAST_OUT=/tmp/tmp.NfPo3RnXXW ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.4QF6Dn0W7o + local LAST_ERR=/tmp/tmp.r4s2phsXkB + local exit_status=0 + local timeout=4 + local exit_status=0 + local timeout=4 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace data-sharded-22380 --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.2CAjMxCDnu + cat /tmp/tmp.4QF6Dn0W7o + rm /tmp/tmp.2CAjMxCDnu /tmp/tmp.4QF6Dn0W7o + return 0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.NfPo3RnXXW + cat /tmp/tmp.r4s2phsXkB + rm /tmp/tmp.NfPo3RnXXW /tmp/tmp.r4s2phsXkB + return 0 + kubectl_bin wait --for=delete namespace data-sharded-22380 ++ mktemp + local LAST_OUT=/tmp/tmp.BHN2pXLA2J ++ mktemp + local LAST_ERR=/tmp/tmp.Nri3jIpHBT + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete namespace data-sharded-22380 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.BHN2pXLA2J + cat /tmp/tmp.Nri3jIpHBT + rm /tmp/tmp.BHN2pXLA2J /tmp/tmp.Nri3jIpHBT + return 0 + desc 'create namespace data-sharded-22380' + set +o xtrace ----------------------------------------------------------------------------------- create namespace data-sharded-22380 ----------------------------------------------------------------------------------- + kubectl_bin create namespace data-sharded-22380 ++ mktemp + local LAST_OUT=/tmp/tmp.cEDuU6bP7c ++ mktemp + local LAST_ERR=/tmp/tmp.gzGgSo4nNx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + 
kubectl create namespace data-sharded-22380 namespace "gke-managed-cim" deleted namespace "gke-managed-system" deleted namespace "gmp-public" deleted namespace "gmp-system" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.cEDuU6bP7c namespace/data-sharded-22380 created + cat /tmp/tmp.gzGgSo4nNx + rm /tmp/tmp.cEDuU6bP7c /tmp/tmp.gzGgSo4nNx + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.Mn8o8LdaKg +++ mktemp ++ local LAST_ERR=/tmp/tmp.AMCAv3h1jm ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Mn8o8LdaKg ++ cat /tmp/tmp.AMCAv3h1jm ++ rm /tmp/tmp.Mn8o8LdaKg /tmp/tmp.AMCAv3h1jm ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1915-2d829121-4-cluster9 --namespace=data-sharded-22380 ++ mktemp + local LAST_OUT=/tmp/tmp.7EBMTq7UR7 ++ mktemp + local LAST_ERR=/tmp/tmp.aUROmn2ERa + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1915-2d829121-4-cluster9 --namespace=data-sharded-22380 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.7EBMTq7UR7 Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1915-2d829121-4-cluster9" modified. + cat /tmp/tmp.aUROmn2ERa + rm /tmp/tmp.7EBMTq7UR7 /tmp/tmp.aUROmn2ERa + return 0 + deploy_cert_manager + desc 'deploy cert manager' + set +o xtrace ----------------------------------------------------------------------------------- deploy cert manager ----------------------------------------------------------------------------------- + kubectl_bin create namespace cert-manager ++ mktemp + local LAST_OUT=/tmp/tmp.wUVLzLWV4h ++ mktemp + local LAST_ERR=/tmp/tmp.0Omn44kjY8 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace cert-manager + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.wUVLzLWV4h namespace/cert-manager created + cat /tmp/tmp.0Omn44kjY8 + rm /tmp/tmp.wUVLzLWV4h /tmp/tmp.0Omn44kjY8 + return 0 + kubectl_bin label namespace cert-manager certmanager.k8s.io/disable-validation=true ++ mktemp + local LAST_OUT=/tmp/tmp.APl68A8Kek ++ mktemp + local LAST_ERR=/tmp/tmp.alaITVPGkc + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl label namespace cert-manager certmanager.k8s.io/disable-validation=true + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.APl68A8Kek namespace/cert-manager labeled + cat /tmp/tmp.alaITVPGkc + rm /tmp/tmp.APl68A8Kek /tmp/tmp.alaITVPGkc + return 0 + kubectl_bin apply -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml --validate=false ++ mktemp + local LAST_OUT=/tmp/tmp.jlVt4Q7b86 ++ mktemp + local LAST_ERR=/tmp/tmp.riKrEipBh4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml --validate=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.jlVt4Q7b86 namespace/cert-manager configured customresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io unchanged 
customresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io unchanged serviceaccount/cert-manager-cainjector created serviceaccount/cert-manager created serviceaccount/cert-manager-webhook created clusterrole.rbac.authorization.k8s.io/cert-manager-cainjector unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-cluster-view unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-view unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-edit unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews unchanged role.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged role.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged role.rbac.authorization.k8s.io/cert-manager-tokenrequest created role.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created rolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged rolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged rolebinding.rbac.authorization.k8s.io/cert-manager-cert-manager-tokenrequest created rolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created service/cert-manager-cainjector created service/cert-manager created service/cert-manager-webhook created deployment.apps/cert-manager-cainjector created deployment.apps/cert-manager created deployment.apps/cert-manager-webhook created mutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured validatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured + cat /tmp/tmp.riKrEipBh4 Warning: resource 
namespaces/cert-manager is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. + rm /tmp/tmp.jlVt4Q7b86 /tmp/tmp.riKrEipBh4 + return 0 + kubectl_bin -n cert-manager wait pod -l app.kubernetes.io/instance=cert-manager --for=condition=ready ++ mktemp + local LAST_OUT=/tmp/tmp.1FbMy9NCN3 ++ mktemp + local LAST_ERR=/tmp/tmp.X3G63d27CK + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl -n cert-manager wait pod -l app.kubernetes.io/instance=cert-manager --for=condition=ready + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.1FbMy9NCN3 pod/cert-manager-6687d8765c-llp7g condition met pod/cert-manager-cainjector-764498cfc8-gn52m condition met pod/cert-manager-webhook-74c74b87d7-lzf68 condition met + cat /tmp/tmp.X3G63d27CK + rm /tmp/tmp.1FbMy9NCN3 /tmp/tmp.X3G63d27CK + return 0 + sleep 120 + desc 'create secrets and start client' + set +o xtrace ----------------------------------------------------------------------------------- create secrets and start client ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.50k4iBalZc ++ mktemp + local LAST_ERR=/tmp/tmp.fP9ltqgBZk + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.50k4iBalZc secret/some-users created + cat /tmp/tmp.fP9ltqgBZk + rm /tmp/tmp.50k4iBalZc /tmp/tmp.fP9ltqgBZk + return 0 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/e2e-tests/conf/client_with_tls.yml ++ mktemp + local LAST_OUT=/tmp/tmp.8IyKVX2bYu ++ mktemp + local LAST_ERR=/tmp/tmp.QvQLJOM23U + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/e2e-tests/conf/client_with_tls.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.8IyKVX2bYu deployment.apps/psmdb-client created + cat /tmp/tmp.QvQLJOM23U + rm /tmp/tmp.8IyKVX2bYu /tmp/tmp.QvQLJOM23U + return 0 + cluster=some-name + desc 'create first PSMDB cluster some-name' + set +o xtrace ----------------------------------------------------------------------------------- create first PSMDB cluster some-name ----------------------------------------------------------------------------------- + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/e2e-tests/data-sharded/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/e2e-tests/data-sharded/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/e2e-tests/data-sharded/conf/some-name.yml + yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod7.0"' ++ mktemp + yq eval '.spec.upgradeOptions.apply="Never"' + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-1915-2d829121"' + local LAST_OUT=/tmp/tmp.vr4xXU03n5 + yq eval '(.spec | select(has("backup"))).backup.image = 
"perconalab/percona-server-mongodb-operator:main-backup"' ++ mktemp + local LAST_ERR=/tmp/tmp.8pqh3Ljop9 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + yq eval '(.spec | select(has("pmm"))).pmm.image = "perconalab/pmm-client:dev-latest"' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.vr4xXU03n5 perconaservermongodb.psmdb.percona.com/some-name created + cat /tmp/tmp.8pqh3Ljop9 + rm /tmp/tmp.vr4xXU03n5 /tmp/tmp.8pqh3Ljop9 + return 0 + desc 'check if all Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all Pods started ----------------------------------------------------------------------------------- + wait_for_running some-name-cfg 3 + local name=some-name-cfg + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=cfg + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-cfg-0 + local pod=some-name-cfg-0 + set +o xtrace waiting for pod/some-name-cfg-0 to be ready.............OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-cfg-1 + local pod=some-name-cfg-1 + set +o xtrace waiting for pod/some-name-cfg-1 to be ready............OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.N2Aa7pV4Vr +++ mktemp ++ local LAST_ERR=/tmp/tmp.2AmZbI3F2J ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.N2Aa7pV4Vr ++ cat /tmp/tmp.2AmZbI3F2J ++ rm /tmp/tmp.N2Aa7pV4Vr /tmp/tmp.2AmZbI3F2J ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-cfg-2 + local pod=some-name-cfg-2 + set +o xtrace waiting for pod/some-name-cfg-2 to be ready............OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xUHFqxiyoX +++ mktemp ++ local LAST_ERR=/tmp/tmp.j9JNMYxqT8 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xUHFqxiyoX ++ cat /tmp/tmp.j9JNMYxqT8 ++ rm /tmp/tmp.xUHFqxiyoX /tmp/tmp.j9JNMYxqT8 ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness.......................................... 
+ wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.n0rBgKLi8i +++ mktemp ++ local LAST_ERR=/tmp/tmp.APSvm4xiQJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.n0rBgKLi8i ++ cat /tmp/tmp.APSvm4xiQJ ++ rm /tmp/tmp.n0rBgKLi8i /tmp/tmp.APSvm4xiQJ ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ewWo3ZUWzb +++ mktemp ++ local LAST_ERR=/tmp/tmp.Kr1w6OtPCp ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ewWo3ZUWzb ++ cat /tmp/tmp.Kr1w6OtPCp ++ rm /tmp/tmp.ewWo3ZUWzb /tmp/tmp.Kr1w6OtPCp ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + wait_for_running some-name-rs1 3 + local name=some-name-rs1 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs1 + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-rs1-0 + local pod=some-name-rs1-0 + set +o xtrace waiting for pod/some-name-rs1-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-rs1-1 + local pod=some-name-rs1-1 + set +o xtrace waiting for pod/some-name-rs1-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs1")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FWFcpacDJm +++ mktemp ++ local LAST_ERR=/tmp/tmp.Qd5PK7QzGX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs1")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.FWFcpacDJm ++ cat /tmp/tmp.Qd5PK7QzGX ++ rm /tmp/tmp.FWFcpacDJm /tmp/tmp.Qd5PK7QzGX ++ return 0 + [[ true == \t\r\u\e ]] + wait_pod some-name-rs1-arbiter-0 + local pod=some-name-rs1-arbiter-0 + set +o xtrace waiting for pod/some-name-rs1-arbiter-0 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs1")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6jcy50AHW6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.4qcPQDSGsc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i 
in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs1")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.6jcy50AHW6 ++ cat /tmp/tmp.4qcPQDSGsc ++ rm /tmp/tmp.6jcy50AHW6 /tmp/tmp.4qcPQDSGsc ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + wait_for_running some-name-rs2 3 + local name=some-name-rs2 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs2 + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-rs2-0 + local pod=some-name-rs2-0 + set +o xtrace waiting for pod/some-name-rs2-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-rs2-1 + local pod=some-name-rs2-1 + set +o xtrace waiting for pod/some-name-rs2-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs2")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JXp54glVjl +++ mktemp ++ local LAST_ERR=/tmp/tmp.RqsRhn6JE9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs2")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JXp54glVjl ++ cat /tmp/tmp.RqsRhn6JE9 ++ rm /tmp/tmp.JXp54glVjl /tmp/tmp.RqsRhn6JE9 ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs2-2 + local pod=some-name-rs2-2 + set +o xtrace waiting for pod/some-name-rs2-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs2")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7MrMApV3SY +++ mktemp ++ local LAST_ERR=/tmp/tmp.pX70HoVCqI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs2")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.7MrMApV3SY ++ cat /tmp/tmp.pX70HoVCqI ++ rm /tmp/tmp.7MrMApV3SY /tmp/tmp.pX70HoVCqI ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + wait_for_running some-name-mongos 3 + local name=some-name-mongos + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=mongos + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-mongos-0 + local pod=some-name-mongos-0 + set +o xtrace waiting for pod/some-name-mongos-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-mongos-1 + local pod=some-name-mongos-1 + set +o xtrace waiting for pod/some-name-mongos-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BX0ea2JujB +++ mktemp ++ local LAST_ERR=/tmp/tmp.AfFiIkpjFg ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.BX0ea2JujB ++ cat /tmp/tmp.AfFiIkpjFg ++ rm /tmp/tmp.BX0ea2JujB /tmp/tmp.AfFiIkpjFg ++ return 
0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-mongos-2 + local pod=some-name-mongos-2 + set +o xtrace waiting for pod/some-name-mongos-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XksMcCDQhU +++ mktemp ++ local LAST_ERR=/tmp/tmp.DeZwRwMfTU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.XksMcCDQhU ++ cat /tmp/tmp.DeZwRwMfTU ++ rm /tmp/tmp.XksMcCDQhU /tmp/tmp.DeZwRwMfTU ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + desc 'create user' + set +o xtrace ----------------------------------------------------------------------------------- create user ----------------------------------------------------------------------------------- + run_mongos 'db.createUser({user:"user",pwd:"pass",roles:[{db:"app",role:"readWrite"}]})' userAdmin:userAdmin123456@some-name-mongos.data-sharded-22380 mongodb .svc.cluster.local '--tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' + local 'command=db.createUser({user:"user",pwd:"pass",roles:[{db:"app",role:"readWrite"}]})' + local uri=userAdmin:userAdmin123456@some-name-mongos.data-sharded-22380 + local driver=mongodb + local suffix=.svc.cluster.local + local 'mongo_flag=--tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' + local port=27017 + local mongo_bin=mongo ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hdAgIIW28y +++ mktemp ++ local LAST_ERR=/tmp/tmp.GpJOe0kpS6 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hdAgIIW28y ++ cat /tmp/tmp.GpJOe0kpS6 ++ rm /tmp/tmp.hdAgIIW28y /tmp/tmp.GpJOe0kpS6 ++ return 0 + local client_container=psmdb-client-b9788d8bc-6ltm9 + kubectl_bin exec psmdb-client-b9788d8bc-6ltm9 -- bash -c 'printf '\''db.createUser({user:"user",pwd:"pass",roles:[{db:"app",role:"readWrite"}]})\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.data-sharded-22380.svc.cluster.local:27017/admin --tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' ++ mktemp + local LAST_OUT=/tmp/tmp.Qq8DyOT7kL ++ mktemp + local LAST_ERR=/tmp/tmp.SfozoPVy35 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-b9788d8bc-6ltm9 -- bash -c 'printf '\''db.createUser({user:"user",pwd:"pass",roles:[{db:"app",role:"readWrite"}]})\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.data-sharded-22380.svc.cluster.local:27017/admin --tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Qq8DyOT7kL Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.data-sharded-22380.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb 
{"t":{"$date":"2025-05-21T09:56:33.634Z"},"s":"I", "c":"NETWORK", "id":5490002, "ctx":"thread1","msg":"Started a new thread for the timer service"} Implicit session: session { "id" : UUID("c89fbd8f-e6cf-48e1-a090-c9e688424f77") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match Successfully added user: { "user" : "user", "roles" : [ { "db" : "app", "role" : "readWrite" } ] } bye + cat /tmp/tmp.SfozoPVy35 + rm /tmp/tmp.Qq8DyOT7kL /tmp/tmp.SfozoPVy35 + return 0 + sleep 2 + desc 'set chunk size to 32 MB' + set +o xtrace ----------------------------------------------------------------------------------- set chunk size to 32 MB ----------------------------------------------------------------------------------- + run_mongos 'use config\n db.settings.save( { _id:"chunksize", value: 32 } )' clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380 mongodb .svc.cluster.local '--tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' + local 'command=use config\n db.settings.save( { _id:"chunksize", value: 32 } )' + local uri=clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380 + local driver=mongodb + local suffix=.svc.cluster.local + local 'mongo_flag=--tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' + local port=27017 + local mongo_bin=mongo ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hvDQYzINNC +++ mktemp ++ local LAST_ERR=/tmp/tmp.LHKp4MjrkC ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hvDQYzINNC ++ cat /tmp/tmp.LHKp4MjrkC ++ rm /tmp/tmp.hvDQYzINNC /tmp/tmp.LHKp4MjrkC ++ return 0 + local client_container=psmdb-client-b9788d8bc-6ltm9 + kubectl_bin exec psmdb-client-b9788d8bc-6ltm9 -- bash -c 'printf '\''use config\n db.settings.save( { _id:"chunksize", value: 32 } )\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380.svc.cluster.local:27017/admin --tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' ++ mktemp + local LAST_OUT=/tmp/tmp.PswWQLlKLY ++ mktemp + local LAST_ERR=/tmp/tmp.afFVdR4KlP + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-b9788d8bc-6ltm9 -- bash -c 'printf '\''use config\n db.settings.save( { _id:"chunksize", value: 32 } )\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380.svc.cluster.local:27017/admin --tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.PswWQLlKLY Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.data-sharded-22380.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb {"t":{"$date":"2025-05-21T09:56:37.563Z"},"s":"I", "c":"NETWORK", "id":5490002, "ctx":"thread1","msg":"Started a new thread for the timer service"} Implicit session: session { "id" : UUID("ee3a5827-b58d-460a-b8ce-b6da948377c9") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match 
switched to db config WriteResult({ "nMatched" : 0, "nUpserted" : 1, "nModified" : 0, "_id" : "chunksize" }) bye + cat /tmp/tmp.afFVdR4KlP + rm /tmp/tmp.PswWQLlKLY /tmp/tmp.afFVdR4KlP + return 0 + sleep 2 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + run_script_mongos /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/e2e-tests/data-sharded/data.js user:pass@some-name-mongos.data-sharded-22380 mongodb .svc.cluster.local '--tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' + local script=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/e2e-tests/data-sharded/data.js + local uri=user:pass@some-name-mongos.data-sharded-22380 + local driver=mongodb + local suffix=.svc.cluster.local + local 'mongo_flag=--tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' + local mongo_bin=mongo ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rXm1gZ7YNu +++ mktemp ++ local LAST_ERR=/tmp/tmp.25bktu5zys ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.rXm1gZ7YNu ++ cat /tmp/tmp.25bktu5zys ++ rm /tmp/tmp.rXm1gZ7YNu /tmp/tmp.25bktu5zys ++ return 0 + local client_container=psmdb-client-b9788d8bc-6ltm9 ++ basename /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/e2e-tests/data-sharded/data.js + name=data.js + kubectl_bin cp /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/e2e-tests/data-sharded/data.js data-sharded-22380/psmdb-client-b9788d8bc-6ltm9:/tmp ++ mktemp + local LAST_OUT=/tmp/tmp.2q60Ue7vJh ++ mktemp + local LAST_ERR=/tmp/tmp.9JYjwVvXnj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl cp /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/e2e-tests/data-sharded/data.js data-sharded-22380/psmdb-client-b9788d8bc-6ltm9:/tmp + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.2q60Ue7vJh + cat /tmp/tmp.9JYjwVvXnj + rm /tmp/tmp.2q60Ue7vJh /tmp/tmp.9JYjwVvXnj + return 0 + kubectl_bin exec psmdb-client-b9788d8bc-6ltm9 -- bash -c 'mongo mongodb://user:pass@some-name-mongos.data-sharded-22380.svc.cluster.local/admin --tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls /tmp/data.js' ++ mktemp + local LAST_OUT=/tmp/tmp.5ziNvPeE4T ++ mktemp + local LAST_ERR=/tmp/tmp.J4kBWKDl6U + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-b9788d8bc-6ltm9 -- bash -c 'mongo mongodb://user:pass@some-name-mongos.data-sharded-22380.svc.cluster.local/admin --tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls /tmp/data.js' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.5ziNvPeE4T Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.data-sharded-22380.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb {"t":{"$date":"2025-05-21T09:56:43.658Z"},"s":"I", "c":"NETWORK", "id":5490002, "ctx":"thread1","msg":"Started a new thread for the timer service"} Implicit session: session { "id" : UUID("d9bbf19f-a986-4987-b25a-0b05868aa0e1") } Percona Server for MongoDB 
server version: v7.0.18-11 WARNING: shell and server versions do not match + cat /tmp/tmp.J4kBWKDl6U + rm /tmp/tmp.5ziNvPeE4T /tmp/tmp.J4kBWKDl6U + return 0 + desc 'shard collection' + set +o xtrace ----------------------------------------------------------------------------------- shard collection ----------------------------------------------------------------------------------- + run_mongos 'sh.enableSharding("app")' clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380 mongodb .svc.cluster.local '--tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' + local 'command=sh.enableSharding("app")' + local uri=clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380 + local driver=mongodb + local suffix=.svc.cluster.local + local 'mongo_flag=--tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' + local port=27017 + local mongo_bin=mongo ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VSmr7FY2Hg +++ mktemp ++ local LAST_ERR=/tmp/tmp.03F2UT9fwS ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.VSmr7FY2Hg ++ cat /tmp/tmp.03F2UT9fwS ++ rm /tmp/tmp.VSmr7FY2Hg /tmp/tmp.03F2UT9fwS ++ return 0 + local client_container=psmdb-client-b9788d8bc-6ltm9 + kubectl_bin exec psmdb-client-b9788d8bc-6ltm9 -- bash -c 'printf '\''sh.enableSharding("app")\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380.svc.cluster.local:27017/admin --tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' ++ mktemp + local LAST_OUT=/tmp/tmp.a72idb2A7T ++ mktemp + local LAST_ERR=/tmp/tmp.ixZp3dWQj6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-b9788d8bc-6ltm9 -- bash -c 'printf '\''sh.enableSharding("app")\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380.svc.cluster.local:27017/admin --tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.a72idb2A7T Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.data-sharded-22380.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb {"t":{"$date":"2025-05-21T10:02:08.557Z"},"s":"I", "c":"NETWORK", "id":5490002, "ctx":"thread1","msg":"Started a new thread for the timer service"} Implicit session: session { "id" : UUID("bff5ae65-3351-4c34-9f0a-33894b02bdbf") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match { "ok" : 1, "$clusterTime" : { "clusterTime" : Timestamp(1747821728, 1), "signature" : { "hash" : BinData(0,"3BAwC4tbz4i6cPBOe9Y3REfuWwI="), "keyId" : NumberLong("7506834961974951959") } }, "operationTime" : Timestamp(1747821728, 1) } bye + cat /tmp/tmp.ixZp3dWQj6 + rm /tmp/tmp.a72idb2A7T /tmp/tmp.ixZp3dWQj6 + return 0 + sleep 2 + run_mongos 'sh.shardCollection("app.city", { _id: 1 } )' clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380 mongodb .svc.cluster.local '--tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile 
/etc/mongodb-ssl/ca.crt --tls' + local 'command=sh.shardCollection("app.city", { _id: 1 } )' + local uri=clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380 + local driver=mongodb + local suffix=.svc.cluster.local + local 'mongo_flag=--tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' + local port=27017 + local mongo_bin=mongo ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.d2A93Cgb8x +++ mktemp ++ local LAST_ERR=/tmp/tmp.i8rAYviqlV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.d2A93Cgb8x ++ cat /tmp/tmp.i8rAYviqlV ++ rm /tmp/tmp.d2A93Cgb8x /tmp/tmp.i8rAYviqlV ++ return 0 + local client_container=psmdb-client-b9788d8bc-6ltm9 + kubectl_bin exec psmdb-client-b9788d8bc-6ltm9 -- bash -c 'printf '\''sh.shardCollection("app.city", { _id: 1 } )\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380.svc.cluster.local:27017/admin --tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' ++ mktemp + local LAST_OUT=/tmp/tmp.vJ3JXsUV4M ++ mktemp + local LAST_ERR=/tmp/tmp.4SLiAuNJKj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-b9788d8bc-6ltm9 -- bash -c 'printf '\''sh.shardCollection("app.city", { _id: 1 } )\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380.svc.cluster.local:27017/admin --tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.vJ3JXsUV4M Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.data-sharded-22380.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb {"t":{"$date":"2025-05-21T10:02:13.360Z"},"s":"I", "c":"NETWORK", "id":5490002, "ctx":"thread1","msg":"Started a new thread for the timer service"} Implicit session: session { "id" : UUID("098688c7-fcb0-4ba3-b69c-1a895c12b603") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match { "collectionsharded" : "app.city", "ok" : 1, "$clusterTime" : { "clusterTime" : Timestamp(1747821733, 35), "signature" : { "hash" : BinData(0,"noBzlxIDMDm6Jp6reprSeKFp1HI="), "keyId" : NumberLong("7506834961974951959") } }, "operationTime" : Timestamp(1747821733, 35) } bye + cat /tmp/tmp.4SLiAuNJKj + rm /tmp/tmp.vJ3JXsUV4M /tmp/tmp.4SLiAuNJKj + return 0 + sleep 120 + desc 'check chunks' + set +o xtrace ----------------------------------------------------------------------------------- check chunks ----------------------------------------------------------------------------------- + chunks_param1=ns + chunks_param2='"app.city"' + [[ 7.0 != \4\.\4 ]] + chunks_param1=uuid ++ run_mongos 'use app\n db.getCollectionInfos({ "name": "city" })[0].info.uuid' user:pass@some-name-mongos.data-sharded-22380 '' '' '--tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' ++ grep 'switched to db app' -A 1 ++ local 'command=use app\n db.getCollectionInfos({ "name": "city" })[0].info.uuid' ++ local 
uri=user:pass@some-name-mongos.data-sharded-22380 ++ grep -v 'switched to db app' ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ local 'mongo_flag=--tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' ++ local port=27017 ++ local mongo_bin=mongo +++ echo .svc.cluster.local +++ awk -F: '{print $2}' ++ suffix_port= ++ [[ -z '' ]] ++ suffix=.svc.cluster.local:27017 +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.eDYuGRCEIx ++++ mktemp +++ local LAST_ERR=/tmp/tmp.xXpXjjG4se +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.eDYuGRCEIx +++ cat /tmp/tmp.xXpXjjG4se +++ rm /tmp/tmp.eDYuGRCEIx /tmp/tmp.xXpXjjG4se +++ return 0 ++ local client_container=psmdb-client-b9788d8bc-6ltm9 ++ kubectl_bin exec psmdb-client-b9788d8bc-6ltm9 -- bash -c 'printf '\''use app\n db.getCollectionInfos({ "name": "city" })[0].info.uuid\n'\'' | mongo mongodb://user:pass@some-name-mongos.data-sharded-22380.svc.cluster.local:27017/admin --tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XMZFYw11pD +++ mktemp ++ local LAST_ERR=/tmp/tmp.7d4T4FbRzO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-b9788d8bc-6ltm9 -- bash -c 'printf '\''use app\n db.getCollectionInfos({ "name": "city" })[0].info.uuid\n'\'' | mongo mongodb://user:pass@some-name-mongos.data-sharded-22380.svc.cluster.local:27017/admin --tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.XMZFYw11pD ++ cat /tmp/tmp.7d4T4FbRzO ++ rm /tmp/tmp.XMZFYw11pD /tmp/tmp.7d4T4FbRzO ++ return 0 + chunks_param2='UUID("a74a0b8e-d357-42b1-a6ee-e70bb641bf6a")' + shards=0 + for i in '"rs0"' '"rs1"' '"rs2"' ++ run_mongos 'use config\n db.chunks.count({"uuid": UUID("a74a0b8e-d357-42b1-a6ee-e70bb641bf6a"), "shard": "rs0"})' clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380 mongodb .svc.cluster.local '--tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' ++ grep 'switched to db config' -A 1 ++ local 'command=use config\n db.chunks.count({"uuid": UUID("a74a0b8e-d357-42b1-a6ee-e70bb641bf6a"), "shard": "rs0"})' ++ local uri=clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ local 'mongo_flag=--tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' ++ grep -v 'switched to db config' ++ local port=27017 ++ local mongo_bin=mongo +++ echo .svc.cluster.local +++ awk -F: '{print $2}' ++ suffix_port= ++ [[ -z '' ]] ++ suffix=.svc.cluster.local:27017 +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.3kWV9rDfPw ++++ mktemp +++ local LAST_ERR=/tmp/tmp.KVOG8wyV14 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.3kWV9rDfPw +++ cat /tmp/tmp.KVOG8wyV14 +++ rm /tmp/tmp.3kWV9rDfPw 
/tmp/tmp.KVOG8wyV14 +++ return 0 ++ local client_container=psmdb-client-b9788d8bc-6ltm9 ++ kubectl_bin exec psmdb-client-b9788d8bc-6ltm9 -- bash -c 'printf '\''use config\n db.chunks.count({"uuid": UUID("a74a0b8e-d357-42b1-a6ee-e70bb641bf6a"), "shard": "rs0"})\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380.svc.cluster.local:27017/admin --tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1PawZCg1rt +++ mktemp ++ local LAST_ERR=/tmp/tmp.AHMLQ0bbS0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-b9788d8bc-6ltm9 -- bash -c 'printf '\''use config\n db.chunks.count({"uuid": UUID("a74a0b8e-d357-42b1-a6ee-e70bb641bf6a"), "shard": "rs0"})\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380.svc.cluster.local:27017/admin --tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.1PawZCg1rt ++ cat /tmp/tmp.AHMLQ0bbS0 ++ rm /tmp/tmp.1PawZCg1rt /tmp/tmp.AHMLQ0bbS0 ++ return 0 + out=1 + desc 'rs0 has 1 chunks' + set +o xtrace ----------------------------------------------------------------------------------- rs0 has 1 chunks ----------------------------------------------------------------------------------- + [[ 1 -ne 0 ]] + (( shards = shards + 1 )) + for i in '"rs0"' '"rs1"' '"rs2"' ++ run_mongos 'use config\n db.chunks.count({"uuid": UUID("a74a0b8e-d357-42b1-a6ee-e70bb641bf6a"), "shard": "rs1"})' clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380 mongodb .svc.cluster.local '--tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' ++ local 'command=use config\n db.chunks.count({"uuid": UUID("a74a0b8e-d357-42b1-a6ee-e70bb641bf6a"), "shard": "rs1"})' ++ local uri=clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ local 'mongo_flag=--tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' ++ local port=27017 ++ local mongo_bin=mongo ++ grep -v 'switched to db config' ++ grep 'switched to db config' -A 1 +++ echo .svc.cluster.local +++ awk -F: '{print $2}' ++ suffix_port= ++ [[ -z '' ]] ++ suffix=.svc.cluster.local:27017 +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.sQnJigGgQ2 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.hYrl5ZygT7 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.sQnJigGgQ2 +++ cat /tmp/tmp.hYrl5ZygT7 +++ rm /tmp/tmp.sQnJigGgQ2 /tmp/tmp.hYrl5ZygT7 +++ return 0 ++ local client_container=psmdb-client-b9788d8bc-6ltm9 ++ kubectl_bin exec psmdb-client-b9788d8bc-6ltm9 -- bash -c 'printf '\''use config\n db.chunks.count({"uuid": UUID("a74a0b8e-d357-42b1-a6ee-e70bb641bf6a"), "shard": "rs1"})\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380.svc.cluster.local:27017/admin --tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WJyAOC2AYB +++ mktemp ++ local LAST_ERR=/tmp/tmp.uitzDLKfPX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i 
in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-b9788d8bc-6ltm9 -- bash -c 'printf '\''use config\n db.chunks.count({"uuid": UUID("a74a0b8e-d357-42b1-a6ee-e70bb641bf6a"), "shard": "rs1"})\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380.svc.cluster.local:27017/admin --tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.WJyAOC2AYB ++ cat /tmp/tmp.uitzDLKfPX ++ rm /tmp/tmp.WJyAOC2AYB /tmp/tmp.uitzDLKfPX ++ return 0 + out=1 + desc 'rs1 has 1 chunks' + set +o xtrace ----------------------------------------------------------------------------------- rs1 has 1 chunks ----------------------------------------------------------------------------------- + [[ 1 -ne 0 ]] + (( shards = shards + 1 )) + for i in '"rs0"' '"rs1"' '"rs2"' ++ run_mongos 'use config\n db.chunks.count({"uuid": UUID("a74a0b8e-d357-42b1-a6ee-e70bb641bf6a"), "shard": "rs2"})' clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380 mongodb .svc.cluster.local '--tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' ++ local 'command=use config\n db.chunks.count({"uuid": UUID("a74a0b8e-d357-42b1-a6ee-e70bb641bf6a"), "shard": "rs2"})' ++ grep 'switched to db config' -A 1 ++ local uri=clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ local 'mongo_flag=--tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' ++ local port=27017 ++ local mongo_bin=mongo ++ grep -v 'switched to db config' +++ echo .svc.cluster.local +++ awk -F: '{print $2}' ++ suffix_port= ++ [[ -z '' ]] ++ suffix=.svc.cluster.local:27017 +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.66DZekGyh0 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.XY4RvmNMWl +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.66DZekGyh0 +++ cat /tmp/tmp.XY4RvmNMWl +++ rm /tmp/tmp.66DZekGyh0 /tmp/tmp.XY4RvmNMWl +++ return 0 ++ local client_container=psmdb-client-b9788d8bc-6ltm9 ++ kubectl_bin exec psmdb-client-b9788d8bc-6ltm9 -- bash -c 'printf '\''use config\n db.chunks.count({"uuid": UUID("a74a0b8e-d357-42b1-a6ee-e70bb641bf6a"), "shard": "rs2"})\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380.svc.cluster.local:27017/admin --tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vdrCe5tnk2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.odqaUTI5AV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-b9788d8bc-6ltm9 -- bash -c 'printf '\''use config\n db.chunks.count({"uuid": UUID("a74a0b8e-d357-42b1-a6ee-e70bb641bf6a"), "shard": "rs2"})\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380.svc.cluster.local:27017/admin --tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.vdrCe5tnk2 ++ cat /tmp/tmp.odqaUTI5AV ++ rm /tmp/tmp.vdrCe5tnk2 /tmp/tmp.odqaUTI5AV ++ return 0 + out=1 + desc 'rs2 has 1 chunks' + set +o xtrace 
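# A condensed sketch of the chunk-distribution check being traced here: on this 7.0 cluster
# config.chunks is keyed by the collection uuid rather than ns, so the test resolves the uuid of
# app.city once and then counts chunks per shard, expecting a non-zero count on rs0, rs1 and rs2.
# MONGOS_URI below is a placeholder for the same mongos connection string and TLS flags used above.
#   echo 'db.getSiblingDB("app").getCollectionInfos({ name: "city" })[0].info.uuid' \
#     | mongo "$MONGOS_URI" --quiet
#   echo 'db.getSiblingDB("config").chunks.count({ uuid: UUID("<uuid-from-above>"), shard: "rs0" })' \
#     | mongo "$MONGOS_URI" --quiet        # repeated for rs1 and rs2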
----------------------------------------------------------------------------------- rs2 has 1 chunks ----------------------------------------------------------------------------------- + [[ 1 -ne 0 ]] + (( shards = shards + 1 )) + [[ 3 -lt 3 ]] ++ run_mongos 'use app\n db.dropDatabase()' clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380 mongodb .svc.cluster.local '--tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' ++ local 'command=use app\n db.dropDatabase()' ++ local uri=clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ local 'mongo_flag=--tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' ++ local port=27017 ++ local mongo_bin=mongo +++ echo .svc.cluster.local +++ awk -F: '{print $2}' ++ suffix_port= ++ [[ -z '' ]] ++ suffix=.svc.cluster.local:27017 +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.fUUp6P6agh ++++ mktemp +++ local LAST_ERR=/tmp/tmp.215awQ15w8 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.fUUp6P6agh +++ cat /tmp/tmp.215awQ15w8 +++ rm /tmp/tmp.fUUp6P6agh /tmp/tmp.215awQ15w8 +++ return 0 ++ local client_container=psmdb-client-b9788d8bc-6ltm9 ++ kubectl_bin exec psmdb-client-b9788d8bc-6ltm9 -- bash -c 'printf '\''use app\n db.dropDatabase()\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380.svc.cluster.local:27017/admin --tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JSiuPIqYF0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.tj80ZcFBI2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-b9788d8bc-6ltm9 -- bash -c 'printf '\''use app\n db.dropDatabase()\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@some-name-mongos.data-sharded-22380.svc.cluster.local:27017/admin --tlsCertificateKeyFile /tmp/tls.pem --tlsCAFile /etc/mongodb-ssl/ca.crt --tls' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JSiuPIqYF0 ++ cat /tmp/tmp.tj80ZcFBI2 ++ rm /tmp/tmp.JSiuPIqYF0 /tmp/tmp.tj80ZcFBI2 ++ return 0 + res='Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.data-sharded-22380.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb {"t":{"$date":"2025-05-21T10:04:25.423Z"},"s":"I", "c":"NETWORK", "id":5490002, "ctx":"thread1","msg":"Started a new thread for the timer service"} Implicit session: session { "id" : UUID("449e9bfc-2380-4493-a6e2-e81d16569d90") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db app { "ok" : 1, "$clusterTime" : { "clusterTime" : Timestamp(1747821865, 59), "signature" : { "hash" : BinData(0,"jrIkDYHNS0MefiN5V/zX/3Dxjec="), "keyId" : NumberLong("7506834961974951959") } }, "operationTime" : Timestamp(1747821865, 59) } bye' + grep -q '"ok" : 1' + echo Percona Server for MongoDB shell version v4.4.29-28 connecting to: 'mongodb://some-name-mongos.data-sharded-22380.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb' 
'{"t":{"$date":"2025-05-21T10:04:25.423Z"},"s":"I",' '"c":"NETWORK",' '"id":5490002,' '"ctx":"thread1","msg":"Started' a new thread for the timer 'service"}' Implicit session: session '{' '"id"' : 'UUID("449e9bfc-2380-4493-a6e2-e81d16569d90")' '}' Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db app '{' '"ok"' : 1, '"$clusterTime"' : '{' '"clusterTime"' : 'Timestamp(1747821865,' '59),' '"signature"' : '{' '"hash"' : 'BinData(0,"jrIkDYHNS0MefiN5V/zX/3Dxjec="),' '"keyId"' : 'NumberLong("7506834961974951959")' '}' '},' '"operationTime"' : 'Timestamp(1747821865,' '59)' '}' bye + desc 'check if rs1 and all its related stateful sets are properly removed' + set +o xtrace ----------------------------------------------------------------------------------- check if rs1 and all its related stateful sets are properly removed ----------------------------------------------------------------------------------- + check_rs_proper_component_deletion some-name rs1 + local cluster=some-name + local rs_name=rs1 ++ kubectl_bin get psmdb some-name -ojson +++ mktemp ++ local LAST_OUT=/tmp/tmp.DVwgZwU9E4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.9zcBWztHkP ++ local exit_status=0 ++ local timeout=4 ++ jq --arg RS rs1 '.spec.replsets | map(.name == $RS) | index(true)' +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -ojson ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.DVwgZwU9E4 ++ cat /tmp/tmp.9zcBWztHkP ++ rm /tmp/tmp.DVwgZwU9E4 /tmp/tmp.9zcBWztHkP ++ return 0 + rs_idx=1 + kubectl_bin patch psmdb some-name --type=json '-p=[{'\''op'\'': '\''remove'\'', '\''path'\'': '\''/spec/replsets/1'\''}]' ++ mktemp + local LAST_OUT=/tmp/tmp.LFN4VBhxO9 ++ mktemp + local LAST_ERR=/tmp/tmp.z55wxWx0g8 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch psmdb some-name --type=json '-p=[{'\''op'\'': '\''remove'\'', '\''path'\'': '\''/spec/replsets/1'\''}]' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LFN4VBhxO9 perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.z55wxWx0g8 + rm /tmp/tmp.LFN4VBhxO9 /tmp/tmp.z55wxWx0g8 + return 0 + echo -n 'Deleting replset rs1' Deleting replset rs1++ kubectl_bin get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs1 -ojson ++ jq '.items | length' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XB2ZV6X58w +++ mktemp ++ local LAST_ERR=/tmp/tmp.H2snpBp7Jh ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs1 -ojson ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.XB2ZV6X58w ++ cat /tmp/tmp.H2snpBp7Jh ++ rm /tmp/tmp.XB2ZV6X58w /tmp/tmp.H2snpBp7Jh ++ return 0 + [[ 1 -eq 0 ]] + let retry+=1 + '[' 1 -ge 70 ']' + echo -n . 
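# check_rs_proper_component_deletion, traced here for rs1 and below for rs2, removes the replset
# entry from the PerconaServerMongoDB resource and then polls until its stateful sets are gone.
# A condensed sketch of the same sequence (cluster "some-name", replset "rs1"):
#   idx=$(kubectl get psmdb some-name -ojson | jq --arg RS rs1 '.spec.replsets | map(.name == $RS) | index(true)')
#   kubectl patch psmdb some-name --type=json -p="[{\"op\": \"remove\", \"path\": \"/spec/replsets/$idx\"}]"
#   until [ "$(kubectl get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs1 -ojson | jq '.items | length')" -eq 0 ]; do
#     sleep 30
#   done
# In this run rs1 is gone after a single 30-second retry, while rs2 below needs roughly fifteen.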
.+ sleep 30 ++ kubectl_bin get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs1 -ojson ++ jq '.items | length' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UmZGmcBiM7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.lLUPybfU8s ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs1 -ojson ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.UmZGmcBiM7 ++ cat /tmp/tmp.lLUPybfU8s ++ rm /tmp/tmp.UmZGmcBiM7 /tmp/tmp.lLUPybfU8s ++ return 0 + [[ 0 -eq 0 ]] + echo OK OK + desc 'check if rs2 and all its related stateful sets are properly removed' + set +o xtrace ----------------------------------------------------------------------------------- check if rs2 and all its related stateful sets are properly removed ----------------------------------------------------------------------------------- + check_rs_proper_component_deletion some-name rs2 + local cluster=some-name + local rs_name=rs2 ++ kubectl_bin get psmdb some-name -ojson ++ jq --arg RS rs2 '.spec.replsets | map(.name == $RS) | index(true)' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qXroe756Vx +++ mktemp ++ local LAST_ERR=/tmp/tmp.Sp3r3jgfSu ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -ojson ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.qXroe756Vx ++ cat /tmp/tmp.Sp3r3jgfSu ++ rm /tmp/tmp.qXroe756Vx /tmp/tmp.Sp3r3jgfSu ++ return 0 + rs_idx=1 + kubectl_bin patch psmdb some-name --type=json '-p=[{'\''op'\'': '\''remove'\'', '\''path'\'': '\''/spec/replsets/1'\''}]' ++ mktemp + local LAST_OUT=/tmp/tmp.W3xMOKflFE ++ mktemp + local LAST_ERR=/tmp/tmp.A4qDvPD6Qd + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch psmdb some-name --type=json '-p=[{'\''op'\'': '\''remove'\'', '\''path'\'': '\''/spec/replsets/1'\''}]' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.W3xMOKflFE perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.A4qDvPD6Qd + rm /tmp/tmp.W3xMOKflFE /tmp/tmp.A4qDvPD6Qd + return 0 + echo -n 'Deleting replset rs2' Deleting replset rs2++ kubectl_bin get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ jq '.items | length' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wvkGfzFw1Z +++ mktemp ++ local LAST_ERR=/tmp/tmp.t7Bf2h3MYX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.wvkGfzFw1Z ++ cat /tmp/tmp.t7Bf2h3MYX ++ rm /tmp/tmp.wvkGfzFw1Z /tmp/tmp.t7Bf2h3MYX ++ return 0 + [[ 2 -eq 0 ]] + let retry+=1 + '[' 2 -ge 70 ']' + echo -n . 
.+ sleep 30 ++ kubectl_bin get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ jq '.items | length' +++ mktemp ++ local LAST_OUT=/tmp/tmp.G5ZoOXFphH +++ mktemp ++ local LAST_ERR=/tmp/tmp.QmT2Pv8QcE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.G5ZoOXFphH ++ cat /tmp/tmp.QmT2Pv8QcE ++ rm /tmp/tmp.G5ZoOXFphH /tmp/tmp.QmT2Pv8QcE ++ return 0 + [[ 2 -eq 0 ]] + let retry+=1 + '[' 3 -ge 70 ']' + echo -n . .+ sleep 30 ++ kubectl_bin get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ jq '.items | length' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fr2oRuMgVP +++ mktemp ++ local LAST_ERR=/tmp/tmp.GT8fwJlTcn ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.fr2oRuMgVP ++ cat /tmp/tmp.GT8fwJlTcn ++ rm /tmp/tmp.fr2oRuMgVP /tmp/tmp.GT8fwJlTcn ++ return 0 + [[ 2 -eq 0 ]] + let retry+=1 + '[' 4 -ge 70 ']' + echo -n . .+ sleep 30 ++ kubectl_bin get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ jq '.items | length' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w0tz46oHcG +++ mktemp ++ local LAST_ERR=/tmp/tmp.q1QeUWrIVN ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.w0tz46oHcG ++ cat /tmp/tmp.q1QeUWrIVN ++ rm /tmp/tmp.w0tz46oHcG /tmp/tmp.q1QeUWrIVN ++ return 0 + [[ 2 -eq 0 ]] + let retry+=1 + '[' 5 -ge 70 ']' + echo -n . .+ sleep 30 ++ kubectl_bin get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ jq '.items | length' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0sT3ZtW8Fp +++ mktemp ++ local LAST_ERR=/tmp/tmp.gIu6L4GU6M ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.0sT3ZtW8Fp ++ cat /tmp/tmp.gIu6L4GU6M ++ rm /tmp/tmp.0sT3ZtW8Fp /tmp/tmp.gIu6L4GU6M ++ return 0 + [[ 2 -eq 0 ]] + let retry+=1 + '[' 6 -ge 70 ']' + echo -n . .+ sleep 30 ++ kubectl_bin get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ jq '.items | length' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wucwB03va0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.o3arWrdBEJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.wucwB03va0 ++ cat /tmp/tmp.o3arWrdBEJ ++ rm /tmp/tmp.wucwB03va0 /tmp/tmp.o3arWrdBEJ ++ return 0 + [[ 2 -eq 0 ]] + let retry+=1 + '[' 7 -ge 70 ']' + echo -n . 
.+ sleep 30 ++ kubectl_bin get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ jq '.items | length' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TZHbbmaUXG +++ mktemp ++ local LAST_ERR=/tmp/tmp.sV4xValeyt ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.TZHbbmaUXG ++ cat /tmp/tmp.sV4xValeyt ++ rm /tmp/tmp.TZHbbmaUXG /tmp/tmp.sV4xValeyt ++ return 0 + [[ 2 -eq 0 ]] + let retry+=1 + '[' 8 -ge 70 ']' + echo -n . .+ sleep 30 ++ kubectl_bin get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ jq '.items | length' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UEvjTF9284 +++ mktemp ++ local LAST_ERR=/tmp/tmp.PSkzYdu9iN ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.UEvjTF9284 ++ cat /tmp/tmp.PSkzYdu9iN ++ rm /tmp/tmp.UEvjTF9284 /tmp/tmp.PSkzYdu9iN ++ return 0 + [[ 2 -eq 0 ]] + let retry+=1 + '[' 9 -ge 70 ']' + echo -n . .+ sleep 30 ++ kubectl_bin get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ jq '.items | length' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AtJyj8RjOQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.kTP86PvBDf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.AtJyj8RjOQ ++ cat /tmp/tmp.kTP86PvBDf ++ rm /tmp/tmp.AtJyj8RjOQ /tmp/tmp.kTP86PvBDf ++ return 0 + [[ 2 -eq 0 ]] + let retry+=1 + '[' 10 -ge 70 ']' + echo -n . .+ sleep 30 ++ kubectl_bin get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ jq '.items | length' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QrbYzJmUuz +++ mktemp ++ local LAST_ERR=/tmp/tmp.gwK28Y44t5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.QrbYzJmUuz ++ cat /tmp/tmp.gwK28Y44t5 ++ rm /tmp/tmp.QrbYzJmUuz /tmp/tmp.gwK28Y44t5 ++ return 0 + [[ 2 -eq 0 ]] + let retry+=1 + '[' 11 -ge 70 ']' + echo -n . .+ sleep 30 ++ kubectl_bin get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson +++ mktemp ++ local LAST_OUT=/tmp/tmp.3Z2Y0F08cT +++ mktemp ++ local LAST_ERR=/tmp/tmp.yxxca5h7sK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ jq '.items | length' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3Z2Y0F08cT ++ cat /tmp/tmp.yxxca5h7sK ++ rm /tmp/tmp.3Z2Y0F08cT /tmp/tmp.yxxca5h7sK ++ return 0 + [[ 2 -eq 0 ]] + let retry+=1 + '[' 12 -ge 70 ']' + echo -n . 
.+ sleep 30 ++ kubectl_bin get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ jq '.items | length' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SRItfLJ0d8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.LH3R4rIasY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.SRItfLJ0d8 ++ cat /tmp/tmp.LH3R4rIasY ++ rm /tmp/tmp.SRItfLJ0d8 /tmp/tmp.LH3R4rIasY ++ return 0 + [[ 2 -eq 0 ]] + let retry+=1 + '[' 13 -ge 70 ']' + echo -n . .+ sleep 30 ++ kubectl_bin get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ jq '.items | length' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1F8Jen8W5r +++ mktemp ++ local LAST_ERR=/tmp/tmp.Bqqe2xTZQ5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.1F8Jen8W5r ++ cat /tmp/tmp.Bqqe2xTZQ5 ++ rm /tmp/tmp.1F8Jen8W5r /tmp/tmp.Bqqe2xTZQ5 ++ return 0 + [[ 2 -eq 0 ]] + let retry+=1 + '[' 14 -ge 70 ']' + echo -n . .+ sleep 30 ++ kubectl_bin get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ jq '.items | length' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8rCP3Tn87S +++ mktemp ++ local LAST_ERR=/tmp/tmp.sUbhZFncdp ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8rCP3Tn87S ++ cat /tmp/tmp.sUbhZFncdp ++ rm /tmp/tmp.8rCP3Tn87S /tmp/tmp.sUbhZFncdp ++ return 0 + [[ 1 -eq 0 ]] + let retry+=1 + '[' 15 -ge 70 ']' + echo -n . 
.+ sleep 30 ++ kubectl_bin get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ jq '.items | length' +++ mktemp ++ local LAST_OUT=/tmp/tmp.F0gxVWSPrm +++ mktemp ++ local LAST_ERR=/tmp/tmp.cWroneKczc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get sts -l app.kubernetes.io/instance=some-name,app.kubernetes.io/replset=rs2 -ojson ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.F0gxVWSPrm ++ cat /tmp/tmp.cWroneKczc ++ rm /tmp/tmp.F0gxVWSPrm /tmp/tmp.cWroneKczc ++ return 0 + [[ 0 -eq 0 ]] + echo OK OK + destroy data-sharded-22380 + local namespace=data-sharded-22380 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false ']' + delete_backups + desc 'Delete psmdb-backup' + set +o xtrace ----------------------------------------------------------------------------------- Delete psmdb-backup ----------------------------------------------------------------------------------- ++ kubectl_bin get psmdb-backup --no-headers ++ wc -l +++ mktemp ++ local LAST_OUT=/tmp/tmp.0fnGAtyt5n +++ mktemp ++ local LAST_ERR=/tmp/tmp.Lb77Jt75lG ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup --no-headers ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.0fnGAtyt5n ++ cat /tmp/tmp.Lb77Jt75lG No resources found in data-sharded-22380 namespace. ++ rm /tmp/tmp.0fnGAtyt5n /tmp/tmp.Lb77Jt75lG ++ return 0 + '[' 0 '!=' 0 ']' + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.EaqnW5DjFi ++ mktemp + local LAST_ERR=/tmp/tmp.Gs4dhfNvlx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.EaqnW5DjFi customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.Gs4dhfNvlx + rm /tmp/tmp.EaqnW5DjFi /tmp/tmp.Gs4dhfNvlx + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/deploy/crd.yaml ++ grep -v '\-\-\-' + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch 
perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.klvsbt3zpR ++ mktemp + local LAST_ERR=/tmp/tmp.g9wRqE1nzZ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.klvsbt3zpR + cat /tmp/tmp.g9wRqE1nzZ + rm /tmp/tmp.klvsbt3zpR /tmp/tmp.g9wRqE1nzZ + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.CrKuQcv0DC ++ mktemp + local LAST_ERR=/tmp/tmp.KGT6fa3l6b + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.CrKuQcv0DC + cat /tmp/tmp.KGT6fa3l6b + rm /tmp/tmp.CrKuQcv0DC /tmp/tmp.KGT6fa3l6b + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.jaOdSMIxAv ++ mktemp + local LAST_ERR=/tmp/tmp.S59hPfoH98 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.jaOdSMIxAv + cat /tmp/tmp.S59hPfoH98 + rm /tmp/tmp.jaOdSMIxAv /tmp/tmp.S59hPfoH98 + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.o734lNBOEp ++ mktemp + local LAST_ERR=/tmp/tmp.yfv5f4ZdYs + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1915/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.o734lNBOEp clusterrole.rbac.authorization.k8s.io 
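# The destroy phase traced here repeats delete_crd: it deletes the CRDs with --wait=false, clears
# finalizers from any leftover custom resources, and waits for each CRD to disappear. A condensed
# sketch of that loop (simplified, assuming the operator's deploy/crd.yaml):
#   kubectl delete -f deploy/crd.yaml --ignore-not-found --wait=false
#   for crd in $(yq eval '.metadata.name' deploy/crd.yaml | grep -v -- '---'); do
#     kubectl get "$crd" --all-namespaces --no-headers 2>/dev/null \
#       | while read -r ns name _; do
#           kubectl patch "$crd" -n "$ns" "$name" --type=merge -p '{"metadata":{"finalizers":[]}}'
#         done
#     kubectl wait --for=delete crd "$crd" || :
#   done
# The "doesn't have a resource type" errors are harmless here: the CRDs were already removed by the
# preceding kubectl delete, so there is nothing left to strip finalizers from.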
"percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.yfv5f4ZdYs + rm /tmp/tmp.o734lNBOEp /tmp/tmp.yfv5f4ZdYs + return 0 + destroy_cert_manager + kubectl_bin delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.x2e3UH7r6g ++ mktemp + local LAST_ERR=/tmp/tmp.KVJnWJBloo + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.x2e3UH7r6g namespace "cert-manager" deleted customresourcedefinition.apiextensions.k8s.io "certificaterequests.cert-manager.io" deleted customresourcedefinition.apiextensions.k8s.io "certificates.cert-manager.io" deleted customresourcedefinition.apiextensions.k8s.io "challenges.acme.cert-manager.io" deleted customresourcedefinition.apiextensions.k8s.io "clusterissuers.cert-manager.io" deleted customresourcedefinition.apiextensions.k8s.io "issuers.cert-manager.io" deleted customresourcedefinition.apiextensions.k8s.io "orders.acme.cert-manager.io" deleted serviceaccount "cert-manager-cainjector" deleted serviceaccount "cert-manager" deleted serviceaccount "cert-manager-webhook" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-cainjector" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-controller-issuers" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-controller-certificates" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-controller-orders" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-controller-challenges" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-cluster-view" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-view" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-edit" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" deleted clusterrole.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-cainjector" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-issuers" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-certificates" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-orders" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-challenges" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" deleted clusterrolebinding.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" deleted role.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" deleted role.rbac.authorization.k8s.io "cert-manager:leaderelection" deleted 
rolebinding.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" deleted rolebinding.rbac.authorization.k8s.io "cert-manager:leaderelection" deleted mutatingwebhookconfiguration.admissionregistration.k8s.io "cert-manager-webhook" deleted validatingwebhookconfiguration.admissionregistration.k8s.io "cert-manager-webhook" deleted + cat /tmp/tmp.KVJnWJBloo Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.x2e3UH7r6g namespace "cert-manager" deleted + cat /tmp/tmp.KVJnWJBloo Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": 
customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io 
"cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io 
"cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 4 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.x2e3UH7r6g + cat /tmp/tmp.KVJnWJBloo Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 8 + cat /tmp/tmp.x2e3UH7r6g + cat /tmp/tmp.KVJnWJBloo Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": namespaces 
"cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" 
not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io 
"cert-manager-webhook" not found + rm /tmp/tmp.x2e3UH7r6g /tmp/tmp.KVJnWJBloo + return 1 + true + '[' -n '' ']' + '[' -n psmdb-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace data-sharded-22380 + rm -rf /tmp/tmp.DzmstfAEeq + kubectl_bin delete --grace-period=0 --force=true namespace psmdb-operator ++ mktemp + desc 'test passed' + set +o xtrace ++ mktemp ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.uq27zfJz2O + local LAST_OUT=/tmp/tmp.S6DZpWl6R9 ++ mktemp + local LAST_ERR=/tmp/tmp.jDbzz5CKZ9 + local exit_status=0 + local timeout=4 ++ mktemp ++ seq 0 2 + local LAST_ERR=/tmp/tmp.UFERZsR4fp + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace data-sharded-22380 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace psmdb-operator