++ echo 'Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/logs/mongod-major-upgrade.log' Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/logs/mongod-major-upgrade.log ++ '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/conf/cloud-secret.yml ']' ++ SKIP_BACKUPS_TO_AWS_GCP_AZURE= ++ oc get projects ++ kubectl get nodes ++ grep '^minikube' +++ kubectl version -o json +++ jq -r .serverVersion.gitVersion +++ grep '\-eks\-' WARNING: version difference between client (1.33) and server (1.30) exceeds the supported minor version skew of +/-1 ++ '[' ']' ++ EKS=0 +++ kubectl version -o json +++ grep gke +++ jq -r .serverVersion.gitVersion WARNING: version difference between client (1.33) and server (1.30) exceeds the supported minor version skew of +/-1 ++ '[' v1.30.12-gke.1208000 ']' ++ GKE=1 +++ kubectl version -o json +++ jq -r '.serverVersion.major + "." + .serverVersion.minor' +++ /usr/bin/sed -r 's/[^0-9.]+//g' WARNING: version difference between client (1.33) and server (1.30) exceeds the supported minor version skew of +/-1 ++ KUBE_VERSION=1.30 + set_debug + [[ 1 == 1 ]] + set -o xtrace + main + rbac=rbac + '[' -n psmdb-operator ']' + rbac=cw-rbac + create_infra mongod-major-upgrade-29555 + local ns=mongod-major-upgrade-29555 + [[ 1 == 1 ]] + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.o0vOcUQ0es ++ mktemp + local LAST_ERR=/tmp/tmp.Vokz1lmt96 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.o0vOcUQ0es customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.Vokz1lmt96 + rm /tmp/tmp.o0vOcUQ0es /tmp/tmp.Vokz1lmt96 + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/deploy/crd.yaml ++ grep -v '\-\-\-' + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.A1CFKy5tH1 ++ mktemp + local LAST_ERR=/tmp/tmp.lEBkMN9D1Q + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd 
perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.A1CFKy5tH1 + cat /tmp/tmp.lEBkMN9D1Q + rm /tmp/tmp.A1CFKy5tH1 /tmp/tmp.lEBkMN9D1Q + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.KNqxubYsd8 ++ mktemp + local LAST_ERR=/tmp/tmp.36IowfRCnd + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.KNqxubYsd8 + cat /tmp/tmp.36IowfRCnd + rm /tmp/tmp.KNqxubYsd8 /tmp/tmp.36IowfRCnd + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.tNmKuH5q9j ++ mktemp + local LAST_ERR=/tmp/tmp.DD5JLspKfI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.tNmKuH5q9j + cat /tmp/tmp.DD5JLspKfI + rm /tmp/tmp.tNmKuH5q9j /tmp/tmp.DD5JLspKfI + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.2PSnCcbovX ++ mktemp + local LAST_ERR=/tmp/tmp.5s18EMYpUp + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.2PSnCcbovX clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.5s18EMYpUp + rm /tmp/tmp.2PSnCcbovX /tmp/tmp.5s18EMYpUp + return 0 + check_crd_for_deletion PR-1917-cd834df2 + local git_tag=PR-1917-cd834df2 ++ /usr/bin/sed s/---//g ++ yq eval .metadata.name ++ curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/PR-1917-cd834df2/deploy/crd.yaml ++ /usr/bin/sed 
':a;N;$!ba;s/\n/ /g' + for crd_name in '$(curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml | yq eval '\''.metadata.name'\'' | $sed '\''s/---//g'\'' | $sed '\'':a;N;$!ba;s/\n/ /g'\'')' ++ kubectl_bin get crd/null -o 'jsonpath={.status.conditions[-1].type}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UVJb9qM5ji +++ mktemp ++ local LAST_ERR=/tmp/tmp.PjDQ6TqzR2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.UVJb9qM5ji ++ cat /tmp/tmp.PjDQ6TqzR2 Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 0 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.UVJb9qM5ji ++ cat /tmp/tmp.PjDQ6TqzR2 Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 4 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.UVJb9qM5ji ++ cat /tmp/tmp.PjDQ6TqzR2 Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 8 ++ cat /tmp/tmp.UVJb9qM5ji ++ cat /tmp/tmp.PjDQ6TqzR2 Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ rm /tmp/tmp.UVJb9qM5ji /tmp/tmp.PjDQ6TqzR2 ++ return 1 + [[ '' == \T\e\r\m\i\n\a\t\i\n\g ]] + '[' -n psmdb-operator ']' + create_namespace psmdb-operator + local namespace=psmdb-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ sed s/NAMESPACE// ++ awk '-F ' '{print $2}' + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ grep chaos-mesh.org ++ awk '{print $1}' ++ kubectl get crd + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get clusterrole + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- 
cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace psmdb-operator --ignore-not-found + kubectl_bin get ns ++ mktemp ++ mktemp + xargs kubectl delete ns + local LAST_OUT=/tmp/tmp.y9wvfEwmbH + local LAST_OUT=/tmp/tmp.LYEZNSm1tA ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.QAY6qZCUSL + local exit_status=0 + local timeout=4 + local LAST_ERR=/tmp/tmp.w2N4z35WCs + local exit_status=0 + local timeout=4 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace psmdb-operator --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.y9wvfEwmbH + cat /tmp/tmp.QAY6qZCUSL + rm /tmp/tmp.y9wvfEwmbH /tmp/tmp.QAY6qZCUSL + return 0 namespace "gke-managed-cim" deleted namespace "gke-managed-system" deleted namespace "gmp-public" deleted namespace "gmp-system" deleted namespace "mongod-major-upgrade-15534" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LYEZNSm1tA namespace "psmdb-operator" deleted + cat /tmp/tmp.w2N4z35WCs + rm /tmp/tmp.LYEZNSm1tA /tmp/tmp.w2N4z35WCs + return 0 + kubectl_bin wait --for=delete namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.EBLxpXmF3y ++ mktemp + local LAST_ERR=/tmp/tmp.Q3vJ8Wde2I + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.EBLxpXmF3y + cat /tmp/tmp.Q3vJ8Wde2I + rm /tmp/tmp.EBLxpXmF3y /tmp/tmp.Q3vJ8Wde2I + return 0 + desc 'create namespace psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.9RFDgcDLjB ++ mktemp + local LAST_ERR=/tmp/tmp.Dqm4CB1TLJ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9RFDgcDLjB namespace/psmdb-operator created + cat /tmp/tmp.Dqm4CB1TLJ + rm /tmp/tmp.9RFDgcDLjB /tmp/tmp.Dqm4CB1TLJ + return 0 + set_kube_ctx psmdb-operator + local namespace=psmdb-operator ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.z8lx3ehtUB +++ mktemp ++ local LAST_ERR=/tmp/tmp.FvCzEW5SzH ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.z8lx3ehtUB ++ cat /tmp/tmp.FvCzEW5SzH ++ rm /tmp/tmp.z8lx3ehtUB /tmp/tmp.FvCzEW5SzH ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1917-cd834df2-31-cluster2 --namespace=psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.lWEyy1VdUt ++ mktemp + local LAST_ERR=/tmp/tmp.7ebdd8fBgU + local exit_status=0 + local timeout=4 ++ seq 
0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1917-cd834df2-31-cluster2 --namespace=psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.lWEyy1VdUt Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1917-cd834df2-31-cluster2" modified. + cat /tmp/tmp.7ebdd8fBgU + rm /tmp/tmp.lWEyy1VdUt /tmp/tmp.7ebdd8fBgU + return 0 + deploy_operator + desc 'start PSMDB operator' + set +o xtrace ----------------------------------------------------------------------------------- start PSMDB operator ----------------------------------------------------------------------------------- + local cr_file + '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/mongod-major-upgrade/conf/crd.yaml ']' + cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/deploy/crd.yaml + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.5FSuHN3pbp ++ mktemp + local LAST_ERR=/tmp/tmp.P5zZMWXgWG + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.5FSuHN3pbp customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied + cat /tmp/tmp.P5zZMWXgWG + rm /tmp/tmp.5FSuHN3pbp /tmp/tmp.P5zZMWXgWG + return 0 + '[' -n psmdb-operator ']' + apply_rbac cw-rbac + local operator_namespace=psmdb-operator + local rbac=cw-rbac + sed -e 's^namespace: .*^namespace: psmdb-operator^' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/deploy/cw-rbac.yaml + kubectl_bin apply -n psmdb-operator -f - ++ mktemp + local LAST_OUT=/tmp/tmp.xN0Dfyxhno ++ mktemp + local LAST_ERR=/tmp/tmp.pALXtKsEpP + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.xN0Dfyxhno clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created + cat /tmp/tmp.pALXtKsEpP + rm /tmp/tmp.xN0Dfyxhno /tmp/tmp.pALXtKsEpP + return 0 + kubectl_bin apply -f - + yq eval ' (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-1917-cd834df2") | ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") | ((.. 
| select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/deploy/cw-operator.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.P4j3sFSP30 ++ mktemp + local LAST_ERR=/tmp/tmp.cQVBNYSGZE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.P4j3sFSP30 deployment.apps/percona-server-mongodb-operator created + cat /tmp/tmp.cQVBNYSGZE + rm /tmp/tmp.P4j3sFSP30 /tmp/tmp.cQVBNYSGZE + return 0 + sleep 2 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.Hjc4jDOQ7q +++ mktemp ++ local LAST_ERR=/tmp/tmp.rSdt8qIwj0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Hjc4jDOQ7q ++ cat /tmp/tmp.rSdt8qIwj0 ++ rm /tmp/tmp.Hjc4jDOQ7q /tmp/tmp.rSdt8qIwj0 ++ return 0 + wait_pod percona-server-mongodb-operator-66d6995788-v7vrc + local pod=percona-server-mongodb-operator-66d6995788-v7vrc + set +o xtrace waiting for pod/percona-server-mongodb-operator-66d6995788-v7vrc to be ready..OK + echo 'Print operator info from log' Print operator info from log + grep 'Manager starting up' ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.v6QdyzUaWz +++ mktemp ++ local LAST_ERR=/tmp/tmp.9xHDY6ynfG ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.v6QdyzUaWz ++ cat /tmp/tmp.9xHDY6ynfG ++ rm /tmp/tmp.v6QdyzUaWz /tmp/tmp.9xHDY6ynfG ++ return 0 + kubectl_bin logs percona-server-mongodb-operator-66d6995788-v7vrc ++ mktemp + local LAST_OUT=/tmp/tmp.itUh7xmoKD ++ mktemp + local LAST_ERR=/tmp/tmp.ZZ6kpCB4Jt + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs percona-server-mongodb-operator-66d6995788-v7vrc + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.itUh7xmoKD + cat /tmp/tmp.ZZ6kpCB4Jt + rm /tmp/tmp.itUh7xmoKD /tmp/tmp.ZZ6kpCB4Jt + return 0 2025-06-11T15:17:05.720Z INFO setup Manager starting up {"gitCommit": "cd834df28df9b61510cd3bdf17208295c4489164", "gitBranch": "PR-1917-cd834df2", "buildTime": "", "goVersion": "go1.24.4", "os": "linux", "arch": "amd64"} + create_namespace mongod-major-upgrade-29555 + local namespace=mongod-major-upgrade-29555 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ awk '-F ' '{print $2}' ++ tail -n1 ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) 
were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep validate-auth ++ awk '{print $1}' ++ kubectl get ValidatingWebhookConfiguration + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ awk '{print $1}' ++ grep chaos-mesh.org + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl get clusterrolebinding ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' + '[' -n '' ']' + desc 'cleaned up old namespaces mongod-major-upgrade-29555' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces mongod-major-upgrade-29555 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace mongod-major-upgrade-29555 --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.FqkgbDI8Am + awk '{print$1}' + kubectl_bin get ns ++ mktemp + local LAST_OUT=/tmp/tmp.5lcLayx09t ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.PxwcWrMvbH + local exit_status=0 + local timeout=4 + local LAST_ERR=/tmp/tmp.VmYMiSk5Yu + local exit_status=0 + local timeout=4 ++ seq 0 2 ++ seq 0 2 + xargs kubectl delete ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace mongod-major-upgrade-29555 --ignore-not-found + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.5lcLayx09t + cat /tmp/tmp.VmYMiSk5Yu + rm /tmp/tmp.5lcLayx09t /tmp/tmp.VmYMiSk5Yu + return 0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.FqkgbDI8Am + cat /tmp/tmp.PxwcWrMvbH + rm /tmp/tmp.FqkgbDI8Am /tmp/tmp.PxwcWrMvbH + return 0 + kubectl_bin wait --for=delete namespace mongod-major-upgrade-29555 ++ mktemp + local LAST_OUT=/tmp/tmp.lbLKXRTSOz ++ mktemp + local LAST_ERR=/tmp/tmp.VLIIthKNDm + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete namespace mongod-major-upgrade-29555 namespace "gke-managed-cim" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.lbLKXRTSOz + cat /tmp/tmp.VLIIthKNDm + rm /tmp/tmp.lbLKXRTSOz /tmp/tmp.VLIIthKNDm + return 0 + desc 'create namespace mongod-major-upgrade-29555' + set +o xtrace ----------------------------------------------------------------------------------- create namespace mongod-major-upgrade-29555 ----------------------------------------------------------------------------------- + kubectl_bin create namespace mongod-major-upgrade-29555 ++ mktemp + local LAST_OUT=/tmp/tmp.kAxN2ZaZeG ++ mktemp + local 
LAST_ERR=/tmp/tmp.O4VRmsirlh + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace mongod-major-upgrade-29555 namespace "gke-managed-system" deleted namespace "gmp-public" deleted namespace "gmp-system" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.kAxN2ZaZeG namespace/mongod-major-upgrade-29555 created + cat /tmp/tmp.O4VRmsirlh + rm /tmp/tmp.kAxN2ZaZeG /tmp/tmp.O4VRmsirlh + return 0 + set_kube_ctx mongod-major-upgrade-29555 + local namespace=mongod-major-upgrade-29555 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.ip7FD59J8f +++ mktemp ++ local LAST_ERR=/tmp/tmp.HsPwGmFqq4 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ip7FD59J8f ++ cat /tmp/tmp.HsPwGmFqq4 ++ rm /tmp/tmp.ip7FD59J8f /tmp/tmp.HsPwGmFqq4 ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1917-cd834df2-31-cluster2 --namespace=mongod-major-upgrade-29555 ++ mktemp + local LAST_OUT=/tmp/tmp.rrTYvuQ2qn ++ mktemp + local LAST_ERR=/tmp/tmp.lLoHrGdulo + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1917-cd834df2-31-cluster2 --namespace=mongod-major-upgrade-29555 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.rrTYvuQ2qn Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1917-cd834df2-31-cluster2" modified. + cat /tmp/tmp.lLoHrGdulo + rm /tmp/tmp.rrTYvuQ2qn /tmp/tmp.lLoHrGdulo + return 0 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/conf/client.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.2NomiFTJUi ++ mktemp + local LAST_ERR=/tmp/tmp.Mh9SP2bZDi + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/conf/client.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.2NomiFTJUi deployment.apps/psmdb-client created secret/some-users created + cat /tmp/tmp.Mh9SP2bZDi + rm /tmp/tmp.2NomiFTJUi /tmp/tmp.Mh9SP2bZDi + return 0 + apply_s3_storage_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.3XJOzOVdD9 ++ mktemp + local LAST_ERR=/tmp/tmp.sDlAm9HYXQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.3XJOzOVdD9 secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret 
created + cat /tmp/tmp.sDlAm9HYXQ + rm /tmp/tmp.3XJOzOVdD9 /tmp/tmp.sDlAm9HYXQ + return 0 + desc 'install version service' + set +o xtrace ----------------------------------------------------------------------------------- install version service ----------------------------------------------------------------------------------- + cp /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/mongod-major-upgrade/conf/operator.main.psmdb-operator.dep.json /tmp/tmp.o3Qe20mAZP/operator.1.21.0.psmdb-operator.dep.json + generate_vs_json /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/mongod-major-upgrade/conf/operator.main.psmdb-operator.json /tmp/tmp.o3Qe20mAZP/operator.1.21.0.psmdb-operator.json + local template_path=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/mongod-major-upgrade/conf/operator.main.psmdb-operator.json + local target_path=/tmp/tmp.o3Qe20mAZP/operator.1.21.0.psmdb-operator.json ++ jq '.versions[0].operator="1.21.0"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/mongod-major-upgrade/conf/operator.main.psmdb-operator.json + local 'version_service_source={ "versions": [ { "operator": "1.21.0", "product": "psmdb-operator", "matrix": { "mongod": {}, "pmm": { "2.27.0": { "image_path": "percona/pmm-client:2.27.0", "image_hash": "4f4d4508afe4ef3b5d72e2ebec6485be84204902b0b2b23c3a9e7c2fe4726884", "status": "recommended", "critical": false } }, "backup": {}, "operator": {} } } ] }' + for image_mongod in '${IMAGE_MONGOD_CHAIN[@]}' ++ get_mongod_ver_from_image perconalab/percona-server-mongodb-operator:main-mongod6.0 ++ local image=perconalab/percona-server-mongodb-operator:main-mongod6.0 +++ /usr/bin/sed -r 's/^.*db version v(([0-9]+\.){2}[0-9]+-[0-9]+).*$/\1/g' +++ run_simple_cli_inside_image perconalab/percona-server-mongodb-operator:main-mongod6.0 'mongod --version' +++ local image=perconalab/percona-server-mongodb-operator:main-mongod6.0 +++ local 'cli=mongod --version' +++ local pod_name=2419 +++ kubectl_bin -n default run 2419 --image=perconalab/percona-server-mongodb-operator:main-mongod6.0 --restart=Never --command -- sleep infinity ++++ mktemp +++ local LAST_OUT=/tmp/tmp.slu2xhplqN ++++ mktemp +++ local LAST_ERR=/tmp/tmp.cGOYkYODIk +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl -n default run 2419 --image=perconalab/percona-server-mongodb-operator:main-mongod6.0 --restart=Never --command -- sleep infinity +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.slu2xhplqN +++ cat /tmp/tmp.cGOYkYODIk +++ rm /tmp/tmp.slu2xhplqN /tmp/tmp.cGOYkYODIk +++ return 0 +++ kubectl_bin -n default wait --for=condition=Ready pod/2419 ++++ mktemp +++ local LAST_OUT=/tmp/tmp.MWRP6okxrS ++++ mktemp +++ local LAST_ERR=/tmp/tmp.s6xNC50tyM +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl -n default wait --for=condition=Ready pod/2419 +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.MWRP6okxrS +++ cat /tmp/tmp.s6xNC50tyM +++ rm /tmp/tmp.MWRP6okxrS /tmp/tmp.s6xNC50tyM +++ return 0 ++++ kubectl_bin -n default exec 2419 -- bash -c 'mongod --version 2>&1' +++++ mktemp ++++ local LAST_OUT=/tmp/tmp.JsisT8LDzc +++++ mktemp ++++ local LAST_ERR=/tmp/tmp.h2oOLamU0B ++++ local exit_status=0 ++++ local timeout=4 +++++ seq 0 2 ++++ for i in '$(seq 0 2)' ++++ set +e ++++ kubectl -n default exec 2419 -- bash -c 'mongod --version 2>&1' ++++ exit_status=0 ++++ set -e ++++ '[' 0 '!=' 0 -a 
-n 1 ']' ++++ break ++++ cat /tmp/tmp.JsisT8LDzc ++++ cat /tmp/tmp.h2oOLamU0B ++++ rm /tmp/tmp.JsisT8LDzc /tmp/tmp.h2oOLamU0B ++++ return 0 +++ local 'output=db version v6.0.21-18 Build Info: { "version": "6.0.21-18", "gitVersion": "3cce182dd6706709ea283d68c7875dbd5de6012d", "openSSLVersion": "OpenSSL 3.2.2 4 Jun 2024", "modules": [], "proFeatures": [], "allocator": "tcmalloc", "environment": { "distarch": "x86_64", "target_arch": "x86_64" } }' +++ kubectl_bin -n default delete pod/2419 --grace-period=0 --force ++++ mktemp +++ local LAST_OUT=/tmp/tmp.80BxZpvgzZ ++++ mktemp +++ local LAST_ERR=/tmp/tmp.pfq5zTYp4V +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl -n default delete pod/2419 --grace-period=0 --force +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.80BxZpvgzZ +++ cat /tmp/tmp.pfq5zTYp4V Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. +++ rm /tmp/tmp.80BxZpvgzZ /tmp/tmp.pfq5zTYp4V +++ return 0 +++ echo db version v6.0.21-18 Build Info: '{' '"version":' '"6.0.21-18",' '"gitVersion":' '"3cce182dd6706709ea283d68c7875dbd5de6012d",' '"openSSLVersion":' '"OpenSSL' 3.2.2 4 Jun '2024",' '"modules":' '[],' '"proFeatures":' '[],' '"allocator":' '"tcmalloc",' '"environment":' '{' '"distarch":' '"x86_64",' '"target_arch":' '"x86_64"' '}' '}' ++ version_info=6.0.21-18 ++ [[ ! 6.0.21-18 =~ ^([0-9]+\.){2}[0-9]+-[0-9]+$ ]] ++ echo 6.0.21-18 + current_mongod_version=6.0.21-18 ++ echo '{' '"versions":' '[' '{' '"operator":' '"1.21.0",' '"product":' '"psmdb-operator",' '"matrix":' '{' '"mongod":' '{},' '"pmm":' '{' '"2.27.0":' '{' '"image_path":' '"percona/pmm-client:2.27.0",' '"image_hash":' '"4f4d4508afe4ef3b5d72e2ebec6485be84204902b0b2b23c3a9e7c2fe4726884",' '"status":' '"recommended",' '"critical":' false '}' '},' '"backup":' '{},' '"operator":' '{}' '}' '}' ']' '}' ++ jq '.versions[0].matrix.mongod += {"6.0.21-18": {"image_path":"perconalab/percona-server-mongodb-operator:main-mongod6.0","status":"recommended"}}' + version_service_source='{ "versions": [ { "operator": "1.21.0", "product": "psmdb-operator", "matrix": { "mongod": { "6.0.21-18": { "image_path": "perconalab/percona-server-mongodb-operator:main-mongod6.0", "status": "recommended" } }, "pmm": { "2.27.0": { "image_path": "percona/pmm-client:2.27.0", "image_hash": "4f4d4508afe4ef3b5d72e2ebec6485be84204902b0b2b23c3a9e7c2fe4726884", "status": "recommended", "critical": false } }, "backup": {}, "operator": {} } } ] }' + for image_mongod in '${IMAGE_MONGOD_CHAIN[@]}' ++ get_mongod_ver_from_image perconalab/percona-server-mongodb-operator:main-mongod7.0 ++ local image=perconalab/percona-server-mongodb-operator:main-mongod7.0 +++ /usr/bin/sed -r 's/^.*db version v(([0-9]+\.){2}[0-9]+-[0-9]+).*$/\1/g' +++ run_simple_cli_inside_image perconalab/percona-server-mongodb-operator:main-mongod7.0 'mongod --version' +++ local image=perconalab/percona-server-mongodb-operator:main-mongod7.0 +++ local 'cli=mongod --version' +++ local pod_name=25461 +++ kubectl_bin -n default run 25461 --image=perconalab/percona-server-mongodb-operator:main-mongod7.0 --restart=Never --command -- sleep infinity ++++ mktemp +++ local LAST_OUT=/tmp/tmp.ve1Abl1YCa ++++ mktemp +++ local LAST_ERR=/tmp/tmp.2C77NXYcID +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl -n default run 25461 
--image=perconalab/percona-server-mongodb-operator:main-mongod7.0 --restart=Never --command -- sleep infinity +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.ve1Abl1YCa +++ cat /tmp/tmp.2C77NXYcID +++ rm /tmp/tmp.ve1Abl1YCa /tmp/tmp.2C77NXYcID +++ return 0 +++ kubectl_bin -n default wait --for=condition=Ready pod/25461 ++++ mktemp +++ local LAST_OUT=/tmp/tmp.g3lUebqbqt ++++ mktemp +++ local LAST_ERR=/tmp/tmp.EObGgBM1zZ +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl -n default wait --for=condition=Ready pod/25461 +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.g3lUebqbqt +++ cat /tmp/tmp.EObGgBM1zZ +++ rm /tmp/tmp.g3lUebqbqt /tmp/tmp.EObGgBM1zZ +++ return 0 ++++ kubectl_bin -n default exec 25461 -- bash -c 'mongod --version 2>&1' +++++ mktemp ++++ local LAST_OUT=/tmp/tmp.Dm3wVDQ118 +++++ mktemp ++++ local LAST_ERR=/tmp/tmp.YvTvZZ8122 ++++ local exit_status=0 ++++ local timeout=4 +++++ seq 0 2 ++++ for i in '$(seq 0 2)' ++++ set +e ++++ kubectl -n default exec 25461 -- bash -c 'mongod --version 2>&1' ++++ exit_status=0 ++++ set -e ++++ '[' 0 '!=' 0 -a -n 1 ']' ++++ break ++++ cat /tmp/tmp.Dm3wVDQ118 ++++ cat /tmp/tmp.YvTvZZ8122 ++++ rm /tmp/tmp.Dm3wVDQ118 /tmp/tmp.YvTvZZ8122 ++++ return 0 +++ local 'output=db version v7.0.18-11 Build Info: { "version": "7.0.18-11", "gitVersion": "97ceec31da21050a3f6f3b203bac9e5a2685dcd2", "openSSLVersion": "OpenSSL 3.2.2 4 Jun 2024", "modules": [], "proFeatures": [], "allocator": "tcmalloc", "environment": { "distarch": "x86_64", "target_arch": "x86_64" } }' +++ kubectl_bin -n default delete pod/25461 --grace-period=0 --force ++++ mktemp +++ local LAST_OUT=/tmp/tmp.Rze93rG29d ++++ mktemp +++ local LAST_ERR=/tmp/tmp.fdD2ihT7Dr +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl -n default delete pod/25461 --grace-period=0 --force +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.Rze93rG29d +++ cat /tmp/tmp.fdD2ihT7Dr Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. +++ rm /tmp/tmp.Rze93rG29d /tmp/tmp.fdD2ihT7Dr +++ return 0 +++ echo db version v7.0.18-11 Build Info: '{' '"version":' '"7.0.18-11",' '"gitVersion":' '"97ceec31da21050a3f6f3b203bac9e5a2685dcd2",' '"openSSLVersion":' '"OpenSSL' 3.2.2 4 Jun '2024",' '"modules":' '[],' '"proFeatures":' '[],' '"allocator":' '"tcmalloc",' '"environment":' '{' '"distarch":' '"x86_64",' '"target_arch":' '"x86_64"' '}' '}' ++ version_info=7.0.18-11 ++ [[ ! 
7.0.18-11 =~ ^([0-9]+\.){2}[0-9]+-[0-9]+$ ]] ++ echo 7.0.18-11 + current_mongod_version=7.0.18-11 ++ echo '{' '"versions":' '[' '{' '"operator":' '"1.21.0",' '"product":' '"psmdb-operator",' '"matrix":' '{' '"mongod":' '{' '"6.0.21-18":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod6.0",' '"status":' '"recommended"' '}' '},' '"pmm":' '{' '"2.27.0":' '{' '"image_path":' '"percona/pmm-client:2.27.0",' '"image_hash":' '"4f4d4508afe4ef3b5d72e2ebec6485be84204902b0b2b23c3a9e7c2fe4726884",' '"status":' '"recommended",' '"critical":' false '}' '},' '"backup":' '{},' '"operator":' '{}' '}' '}' ']' '}' ++ jq '.versions[0].matrix.mongod += {"7.0.18-11": {"image_path":"perconalab/percona-server-mongodb-operator:main-mongod7.0","status":"recommended"}}' + version_service_source='{ "versions": [ { "operator": "1.21.0", "product": "psmdb-operator", "matrix": { "mongod": { "6.0.21-18": { "image_path": "perconalab/percona-server-mongodb-operator:main-mongod6.0", "status": "recommended" }, "7.0.18-11": { "image_path": "perconalab/percona-server-mongodb-operator:main-mongod7.0", "status": "recommended" } }, "pmm": { "2.27.0": { "image_path": "percona/pmm-client:2.27.0", "image_hash": "4f4d4508afe4ef3b5d72e2ebec6485be84204902b0b2b23c3a9e7c2fe4726884", "status": "recommended", "critical": false } }, "backup": {}, "operator": {} } } ] }' + for image_mongod in '${IMAGE_MONGOD_CHAIN[@]}' ++ get_mongod_ver_from_image perconalab/percona-server-mongodb-operator:main-mongod8.0 ++ local image=perconalab/percona-server-mongodb-operator:main-mongod8.0 +++ /usr/bin/sed -r 's/^.*db version v(([0-9]+\.){2}[0-9]+-[0-9]+).*$/\1/g' +++ run_simple_cli_inside_image perconalab/percona-server-mongodb-operator:main-mongod8.0 'mongod --version' +++ local image=perconalab/percona-server-mongodb-operator:main-mongod8.0 +++ local 'cli=mongod --version' +++ local pod_name=18753 +++ kubectl_bin -n default run 18753 --image=perconalab/percona-server-mongodb-operator:main-mongod8.0 --restart=Never --command -- sleep infinity ++++ mktemp +++ local LAST_OUT=/tmp/tmp.0dAbDvtLkv ++++ mktemp +++ local LAST_ERR=/tmp/tmp.kWzxAeejsh +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl -n default run 18753 --image=perconalab/percona-server-mongodb-operator:main-mongod8.0 --restart=Never --command -- sleep infinity +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.0dAbDvtLkv +++ cat /tmp/tmp.kWzxAeejsh +++ rm /tmp/tmp.0dAbDvtLkv /tmp/tmp.kWzxAeejsh +++ return 0 +++ kubectl_bin -n default wait --for=condition=Ready pod/18753 ++++ mktemp +++ local LAST_OUT=/tmp/tmp.r1beoTmr9G ++++ mktemp +++ local LAST_ERR=/tmp/tmp.dJCoOcoUt0 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl -n default wait --for=condition=Ready pod/18753 +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.r1beoTmr9G +++ cat /tmp/tmp.dJCoOcoUt0 +++ rm /tmp/tmp.r1beoTmr9G /tmp/tmp.dJCoOcoUt0 +++ return 0 ++++ kubectl_bin -n default exec 18753 -- bash -c 'mongod --version 2>&1' +++++ mktemp ++++ local LAST_OUT=/tmp/tmp.c5hEekOJky +++++ mktemp ++++ local LAST_ERR=/tmp/tmp.NQ7OUYY9Zf ++++ local exit_status=0 ++++ local timeout=4 +++++ seq 0 2 ++++ for i in '$(seq 0 2)' ++++ set +e ++++ kubectl -n default exec 18753 -- bash -c 'mongod --version 2>&1' ++++ exit_status=0 ++++ set -e ++++ '[' 0 '!=' 0 -a -n 1 ']' ++++ break ++++ cat /tmp/tmp.c5hEekOJky ++++ cat /tmp/tmp.NQ7OUYY9Zf ++++ 
rm /tmp/tmp.c5hEekOJky /tmp/tmp.NQ7OUYY9Zf ++++ return 0 +++ local 'output=db version v8.0.8-3 Build Info: { "version": "8.0.8-3", "gitVersion": "6c9bddbcdcbc766a0771e4756e1310252b2c91de", "openSSLVersion": "OpenSSL 3.2.2 4 Jun 2024", "modules": [], "proFeatures": [], "allocator": "tcmalloc-google", "environment": { "distarch": "x86_64", "target_arch": "x86_64" } }' +++ kubectl_bin -n default delete pod/18753 --grace-period=0 --force ++++ mktemp +++ local LAST_OUT=/tmp/tmp.Pv0ZIrSHMB ++++ mktemp +++ local LAST_ERR=/tmp/tmp.0gZWT6q7iW +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl -n default delete pod/18753 --grace-period=0 --force +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.Pv0ZIrSHMB +++ cat /tmp/tmp.0gZWT6q7iW Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. +++ rm /tmp/tmp.Pv0ZIrSHMB /tmp/tmp.0gZWT6q7iW +++ return 0 +++ echo db version v8.0.8-3 Build Info: '{' '"version":' '"8.0.8-3",' '"gitVersion":' '"6c9bddbcdcbc766a0771e4756e1310252b2c91de",' '"openSSLVersion":' '"OpenSSL' 3.2.2 4 Jun '2024",' '"modules":' '[],' '"proFeatures":' '[],' '"allocator":' '"tcmalloc-google",' '"environment":' '{' '"distarch":' '"x86_64",' '"target_arch":' '"x86_64"' '}' '}' ++ version_info=8.0.8-3 ++ [[ ! 8.0.8-3 =~ ^([0-9]+\.){2}[0-9]+-[0-9]+$ ]] ++ echo 8.0.8-3 + current_mongod_version=8.0.8-3 ++ echo '{' '"versions":' '[' '{' '"operator":' '"1.21.0",' '"product":' '"psmdb-operator",' '"matrix":' '{' '"mongod":' '{' '"6.0.21-18":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod6.0",' '"status":' '"recommended"' '},' '"7.0.18-11":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod7.0",' '"status":' '"recommended"' '}' '},' '"pmm":' '{' '"2.27.0":' '{' '"image_path":' '"percona/pmm-client:2.27.0",' '"image_hash":' '"4f4d4508afe4ef3b5d72e2ebec6485be84204902b0b2b23c3a9e7c2fe4726884",' '"status":' '"recommended",' '"critical":' false '}' '},' '"backup":' '{},' '"operator":' '{}' '}' '}' ']' '}' ++ jq '.versions[0].matrix.mongod += {"8.0.8-3": {"image_path":"perconalab/percona-server-mongodb-operator:main-mongod8.0","status":"recommended"}}' + version_service_source='{ "versions": [ { "operator": "1.21.0", "product": "psmdb-operator", "matrix": { "mongod": { "6.0.21-18": { "image_path": "perconalab/percona-server-mongodb-operator:main-mongod6.0", "status": "recommended" }, "7.0.18-11": { "image_path": "perconalab/percona-server-mongodb-operator:main-mongod7.0", "status": "recommended" }, "8.0.8-3": { "image_path": "perconalab/percona-server-mongodb-operator:main-mongod8.0", "status": "recommended" } }, "pmm": { "2.27.0": { "image_path": "percona/pmm-client:2.27.0", "image_hash": "4f4d4508afe4ef3b5d72e2ebec6485be84204902b0b2b23c3a9e7c2fe4726884", "status": "recommended", "critical": false } }, "backup": {}, "operator": {} } } ] }' ++ echo '{' '"versions":' '[' '{' '"operator":' '"1.21.0",' '"product":' '"psmdb-operator",' '"matrix":' '{' '"mongod":' '{' '"6.0.21-18":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod6.0",' '"status":' '"recommended"' '},' '"7.0.18-11":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod7.0",' '"status":' '"recommended"' '},' '"8.0.8-3":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod8.0",' '"status":' '"recommended"' '}' 
'},' '"pmm":' '{' '"2.27.0":' '{' '"image_path":' '"percona/pmm-client:2.27.0",' '"image_hash":' '"4f4d4508afe4ef3b5d72e2ebec6485be84204902b0b2b23c3a9e7c2fe4726884",' '"status":' '"recommended",' '"critical":' false '}' '},' '"backup":' '{},' '"operator":' '{}' '}' '}' ']' '}' +++ get_pbm_version perconalab/percona-server-mongodb-operator:main-backup +++ local image=perconalab/percona-server-mongodb-operator:main-backup ++++ run_simple_cli_inside_image perconalab/percona-server-mongodb-operator:main-backup 'pbm-agent version' ++++ /usr/bin/sed -r 's/^Version:\ (([0-9]+\.){2}[0-9]+)\ .*/\1/g' ++++ local image=perconalab/percona-server-mongodb-operator:main-backup ++++ local 'cli=pbm-agent version' ++++ local pod_name=24053 ++++ kubectl_bin -n default run 24053 --image=perconalab/percona-server-mongodb-operator:main-backup --restart=Never --command -- sleep infinity +++++ mktemp ++++ local LAST_OUT=/tmp/tmp.vH17k752Ob +++++ mktemp ++++ local LAST_ERR=/tmp/tmp.kKY1hKhYvo ++++ local exit_status=0 ++++ local timeout=4 +++++ seq 0 2 ++++ for i in '$(seq 0 2)' ++++ set +e ++++ kubectl -n default run 24053 --image=perconalab/percona-server-mongodb-operator:main-backup --restart=Never --command -- sleep infinity ++++ exit_status=0 ++++ set -e ++++ '[' 0 '!=' 0 -a -n 1 ']' ++++ break ++++ cat /tmp/tmp.vH17k752Ob ++++ cat /tmp/tmp.kKY1hKhYvo ++++ rm /tmp/tmp.vH17k752Ob /tmp/tmp.kKY1hKhYvo ++++ return 0 ++++ kubectl_bin -n default wait --for=condition=Ready pod/24053 +++++ mktemp ++++ local LAST_OUT=/tmp/tmp.1hJ9bmuVNy +++++ mktemp ++++ local LAST_ERR=/tmp/tmp.qtnMEHeLLc ++++ local exit_status=0 ++++ local timeout=4 +++++ seq 0 2 ++++ for i in '$(seq 0 2)' ++++ set +e ++++ kubectl -n default wait --for=condition=Ready pod/24053 ++++ exit_status=0 ++++ set -e ++++ '[' 0 '!=' 0 -a -n 1 ']' ++++ break ++++ cat /tmp/tmp.1hJ9bmuVNy ++++ cat /tmp/tmp.qtnMEHeLLc ++++ rm /tmp/tmp.1hJ9bmuVNy /tmp/tmp.qtnMEHeLLc ++++ return 0 +++++ kubectl_bin -n default exec 24053 -- bash -c 'pbm-agent version 2>&1' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.W2Jbg9phcM ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.0189lBt5UF +++++ local exit_status=0 +++++ local timeout=4 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl -n default exec 24053 -- bash -c 'pbm-agent version 2>&1' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 -a -n 1 ']' +++++ break +++++ cat /tmp/tmp.W2Jbg9phcM +++++ cat /tmp/tmp.0189lBt5UF +++++ rm /tmp/tmp.W2Jbg9phcM /tmp/tmp.0189lBt5UF +++++ return 0 ++++ local 'output=Version: 2.9.1 Platform: linux/amd64 GitCommit: 1f8d60145ed5e7b834c6d7c5318446071bc04ecb GitBranch: release-2.9.1 BuildTime: 2025-03-25_11:35_UTC GoVersion: go1.22.8' ++++ kubectl_bin -n default delete pod/24053 --grace-period=0 --force +++++ mktemp ++++ local LAST_OUT=/tmp/tmp.OsKyFVZMVR +++++ mktemp ++++ local LAST_ERR=/tmp/tmp.ltEsWeZ1af ++++ local exit_status=0 ++++ local timeout=4 +++++ seq 0 2 ++++ for i in '$(seq 0 2)' ++++ set +e ++++ kubectl -n default delete pod/24053 --grace-period=0 --force ++++ exit_status=0 ++++ set -e ++++ '[' 0 '!=' 0 -a -n 1 ']' ++++ break ++++ cat /tmp/tmp.OsKyFVZMVR ++++ cat /tmp/tmp.ltEsWeZ1af Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. 
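# --- editor note (not part of the original log) -------------------------------
# Every image probed above (mongod 6.0 / 7.0 / 8.0 and main-backup) goes through
# the same pattern: start a throw-away pod that only sleeps, wait for it to be
# Ready, exec the CLI inside it, then force-delete the pod. A minimal sketch of
# what run_simple_cli_inside_image appears to do, reconstructed from the traced
# kubectl calls; the $RANDOM pod name and plain kubectl (instead of the
# kubectl_bin retry wrapper) are simplifications, not the original helper:
run_simple_cli_inside_image() {
	local image="$1"
	local cli="$2"
	local pod_name="$RANDOM"

	kubectl -n default run "$pod_name" --image="$image" --restart=Never --command -- sleep infinity
	kubectl -n default wait --for=condition=Ready "pod/$pod_name"
	kubectl -n default exec "$pod_name" -- bash -c "$cli 2>&1"
	kubectl -n default delete "pod/$pod_name" --grace-period=0 --force
}
# The caller then cuts the bare version out of the output, e.g. for mongod:
#   ... | /usr/bin/sed -r 's/^.*db version v(([0-9]+\.){2}[0-9]+-[0-9]+).*$/\1/g'
# -------------------------------------------------------------------------------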
++++ rm /tmp/tmp.OsKyFVZMVR /tmp/tmp.ltEsWeZ1af ++++ return 0 ++++ echo Version: 2.9.1 Platform: linux/amd64 GitCommit: 1f8d60145ed5e7b834c6d7c5318446071bc04ecb GitBranch: release-2.9.1 BuildTime: 2025-03-25_11:35_UTC GoVersion: go1.22.8 +++ local version_info=2.9.1 +++ [[ ! 2.9.1 =~ ^([0-9]+\.){2}[0-9]+$ ]] +++ echo 2.9.1 ++ jq '.versions[0].matrix.backup += {"2.9.1": {"image_path":"perconalab/percona-server-mongodb-operator:main-backup","status":"recommended"}}' + version_service_source='{ "versions": [ { "operator": "1.21.0", "product": "psmdb-operator", "matrix": { "mongod": { "6.0.21-18": { "image_path": "perconalab/percona-server-mongodb-operator:main-mongod6.0", "status": "recommended" }, "7.0.18-11": { "image_path": "perconalab/percona-server-mongodb-operator:main-mongod7.0", "status": "recommended" }, "8.0.8-3": { "image_path": "perconalab/percona-server-mongodb-operator:main-mongod8.0", "status": "recommended" } }, "pmm": { "2.27.0": { "image_path": "percona/pmm-client:2.27.0", "image_hash": "4f4d4508afe4ef3b5d72e2ebec6485be84204902b0b2b23c3a9e7c2fe4726884", "status": "recommended", "critical": false } }, "backup": { "2.9.1": { "image_path": "perconalab/percona-server-mongodb-operator:main-backup", "status": "recommended" } }, "operator": {} } } ] }' ++ echo '{' '"versions":' '[' '{' '"operator":' '"1.21.0",' '"product":' '"psmdb-operator",' '"matrix":' '{' '"mongod":' '{' '"6.0.21-18":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod6.0",' '"status":' '"recommended"' '},' '"7.0.18-11":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod7.0",' '"status":' '"recommended"' '},' '"8.0.8-3":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod8.0",' '"status":' '"recommended"' '}' '},' '"pmm":' '{' '"2.27.0":' '{' '"image_path":' '"percona/pmm-client:2.27.0",' '"image_hash":' '"4f4d4508afe4ef3b5d72e2ebec6485be84204902b0b2b23c3a9e7c2fe4726884",' '"status":' '"recommended",' '"critical":' false '}' '},' '"backup":' '{' '"2.9.1":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-backup",' '"status":' '"recommended"' '}' '},' '"operator":' '{}' '}' '}' ']' '}' ++ jq '.versions[0].matrix.operator += {"1.21.0": {"image_path":"perconalab/percona-server-mongodb-operator:PR-1917-cd834df2","status":"recommended"}}' + version_service_source='{ "versions": [ { "operator": "1.21.0", "product": "psmdb-operator", "matrix": { "mongod": { "6.0.21-18": { "image_path": "perconalab/percona-server-mongodb-operator:main-mongod6.0", "status": "recommended" }, "7.0.18-11": { "image_path": "perconalab/percona-server-mongodb-operator:main-mongod7.0", "status": "recommended" }, "8.0.8-3": { "image_path": "perconalab/percona-server-mongodb-operator:main-mongod8.0", "status": "recommended" } }, "pmm": { "2.27.0": { "image_path": "percona/pmm-client:2.27.0", "image_hash": "4f4d4508afe4ef3b5d72e2ebec6485be84204902b0b2b23c3a9e7c2fe4726884", "status": "recommended", "critical": false } }, "backup": { "2.9.1": { "image_path": "perconalab/percona-server-mongodb-operator:main-backup", "status": "recommended" } }, "operator": { "1.21.0": { "image_path": "perconalab/percona-server-mongodb-operator:PR-1917-cd834df2", "status": "recommended" } } } } ] }' + jq . 
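# --- editor note (not part of the original log) -------------------------------
# At this point version_service_source holds the complete fake version-service
# payload. The trace builds it incrementally: the operator version is stamped
# into the template first, then each detected component version is appended to
# the matching matrix section with jq. One representative append, as seen in
# the trace for mongod 6.0 (the other mongod, backup and operator entries
# follow the same shape):
version_service_source=$(echo "$version_service_source" \
	| jq '.versions[0].matrix.mongod += {"6.0.21-18": {"image_path":"perconalab/percona-server-mongodb-operator:main-mongod6.0","status":"recommended"}}')
# -------------------------------------------------------------------------------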
+ echo '{' '"versions":' '[' '{' '"operator":' '"1.21.0",' '"product":' '"psmdb-operator",' '"matrix":' '{' '"mongod":' '{' '"6.0.21-18":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod6.0",' '"status":' '"recommended"' '},' '"7.0.18-11":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod7.0",' '"status":' '"recommended"' '},' '"8.0.8-3":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod8.0",' '"status":' '"recommended"' '}' '},' '"pmm":' '{' '"2.27.0":' '{' '"image_path":' '"percona/pmm-client:2.27.0",' '"image_hash":' '"4f4d4508afe4ef3b5d72e2ebec6485be84204902b0b2b23c3a9e7c2fe4726884",' '"status":' '"recommended",' '"critical":' false '}' '},' '"backup":' '{' '"2.9.1":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-backup",' '"status":' '"recommended"' '}' '},' '"operator":' '{' '"1.21.0":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:PR-1917-cd834df2",' '"status":' '"recommended"' '}' '}' '}' '}' ']' '}' + kubectl_bin create configmap -n psmdb-operator versions --from-file /tmp/tmp.o3Qe20mAZP/operator.1.21.0.psmdb-operator.dep.json --from-file /tmp/tmp.o3Qe20mAZP/operator.1.21.0.psmdb-operator.json ++ mktemp + local LAST_OUT=/tmp/tmp.eIW8pe5Ssa ++ mktemp + local LAST_ERR=/tmp/tmp.F5A3xGGfSI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create configmap -n psmdb-operator versions --from-file /tmp/tmp.o3Qe20mAZP/operator.1.21.0.psmdb-operator.dep.json --from-file /tmp/tmp.o3Qe20mAZP/operator.1.21.0.psmdb-operator.json + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.eIW8pe5Ssa configmap/versions created + cat /tmp/tmp.F5A3xGGfSI + rm /tmp/tmp.eIW8pe5Ssa /tmp/tmp.F5A3xGGfSI + return 0 + kubectl_bin apply -n psmdb-operator -f - + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/mongod-major-upgrade/conf/vs.yml + /usr/bin/sed -r s#operator.9.9.9.psmdb-operator#operator.1.21.0.psmdb-operator#g ++ mktemp + local LAST_OUT=/tmp/tmp.Jz9zEG3Awd ++ mktemp + local LAST_ERR=/tmp/tmp.wNmxYYYlOi + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Jz9zEG3Awd deployment.apps/version-service created service/version-service created + cat /tmp/tmp.wNmxYYYlOi + rm /tmp/tmp.Jz9zEG3Awd /tmp/tmp.wNmxYYYlOi + return 0 ++ jq '.[] | .[] |.matrix.mongod' /tmp/tmp.o3Qe20mAZP/operator.1.21.0.psmdb-operator.json + version_matrix='{ "6.0.21-18": { "image_path": "perconalab/percona-server-mongodb-operator:main-mongod6.0", "status": "recommended" }, "7.0.18-11": { "image_path": "perconalab/percona-server-mongodb-operator:main-mongod7.0", "status": "recommended" }, "8.0.8-3": { "image_path": "perconalab/percona-server-mongodb-operator:main-mongod8.0", "status": "recommended" } }' ++ get_mongod_ver_from_image perconalab/percona-server-mongodb-operator:main-mongod7.0 ++ local image=perconalab/percona-server-mongodb-operator:main-mongod7.0 +++ run_simple_cli_inside_image perconalab/percona-server-mongodb-operator:main-mongod7.0 'mongod --version' +++ local image=perconalab/percona-server-mongodb-operator:main-mongod7.0 +++ /usr/bin/sed -r 's/^.*db version v(([0-9]+\.){2}[0-9]+-[0-9]+).*$/\1/g' +++ local 'cli=mongod --version' +++ local pod_name=21731 +++ kubectl_bin -n default run 21731 --image=perconalab/percona-server-mongodb-operator:main-mongod7.0 
--restart=Never --command -- sleep infinity ++++ mktemp +++ local LAST_OUT=/tmp/tmp.zXoBvDrsn0 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.8MoXNIeg4j +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl -n default run 21731 --image=perconalab/percona-server-mongodb-operator:main-mongod7.0 --restart=Never --command -- sleep infinity +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.zXoBvDrsn0 +++ cat /tmp/tmp.8MoXNIeg4j +++ rm /tmp/tmp.zXoBvDrsn0 /tmp/tmp.8MoXNIeg4j +++ return 0 +++ kubectl_bin -n default wait --for=condition=Ready pod/21731 ++++ mktemp +++ local LAST_OUT=/tmp/tmp.euQuzLqqKp ++++ mktemp +++ local LAST_ERR=/tmp/tmp.VVpHHdorrM +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl -n default wait --for=condition=Ready pod/21731 +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.euQuzLqqKp +++ cat /tmp/tmp.VVpHHdorrM +++ rm /tmp/tmp.euQuzLqqKp /tmp/tmp.VVpHHdorrM +++ return 0 ++++ kubectl_bin -n default exec 21731 -- bash -c 'mongod --version 2>&1' +++++ mktemp ++++ local LAST_OUT=/tmp/tmp.PFZZMRAIQG +++++ mktemp ++++ local LAST_ERR=/tmp/tmp.wfUhiJCceC ++++ local exit_status=0 ++++ local timeout=4 +++++ seq 0 2 ++++ for i in '$(seq 0 2)' ++++ set +e ++++ kubectl -n default exec 21731 -- bash -c 'mongod --version 2>&1' ++++ exit_status=0 ++++ set -e ++++ '[' 0 '!=' 0 -a -n 1 ']' ++++ break ++++ cat /tmp/tmp.PFZZMRAIQG ++++ cat /tmp/tmp.wfUhiJCceC ++++ rm /tmp/tmp.PFZZMRAIQG /tmp/tmp.wfUhiJCceC ++++ return 0 +++ local 'output=db version v7.0.18-11 Build Info: { "version": "7.0.18-11", "gitVersion": "97ceec31da21050a3f6f3b203bac9e5a2685dcd2", "openSSLVersion": "OpenSSL 3.2.2 4 Jun 2024", "modules": [], "proFeatures": [], "allocator": "tcmalloc", "environment": { "distarch": "x86_64", "target_arch": "x86_64" } }' +++ kubectl_bin -n default delete pod/21731 --grace-period=0 --force ++++ mktemp +++ local LAST_OUT=/tmp/tmp.qL28MYN38Z ++++ mktemp +++ local LAST_ERR=/tmp/tmp.SsMZJCp4Gh +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl -n default delete pod/21731 --grace-period=0 --force +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.qL28MYN38Z +++ cat /tmp/tmp.SsMZJCp4Gh Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. +++ rm /tmp/tmp.qL28MYN38Z /tmp/tmp.SsMZJCp4Gh +++ return 0 +++ echo db version v7.0.18-11 Build Info: '{' '"version":' '"7.0.18-11",' '"gitVersion":' '"97ceec31da21050a3f6f3b203bac9e5a2685dcd2",' '"openSSLVersion":' '"OpenSSL' 3.2.2 4 Jun '2024",' '"modules":' '[],' '"proFeatures":' '[],' '"allocator":' '"tcmalloc",' '"environment":' '{' '"distarch":' '"x86_64",' '"target_arch":' '"x86_64"' '}' '}' ++ version_info=7.0.18-11 ++ [[ ! 7.0.18-11 =~ ^([0-9]+\.){2}[0-9]+-[0-9]+$ ]] ++ echo 7.0.18-11 + current_mongod_version=7.0.18-11 ++ echo '{' '"6.0.21-18":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod6.0",' '"status":' '"recommended"' '},' '"7.0.18-11":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod7.0",' '"status":' '"recommended"' '},' '"8.0.8-3":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod8.0",' '"status":' '"recommended"' '}' '}' ++ jq '. 
+= {"7.0.18-11":{"image_path":"perconalab/percona-server-mongodb-operator:main-mongod7.0", "status": "recommended"}}' + version_matrix='{ "6.0.21-18": { "image_path": "perconalab/percona-server-mongodb-operator:main-mongod6.0", "status": "recommended" }, "7.0.18-11": { "image_path": "perconalab/percona-server-mongodb-operator:main-mongod7.0", "status": "recommended" }, "8.0.8-3": { "image_path": "perconalab/percona-server-mongodb-operator:main-mongod8.0", "status": "recommended" } }' ++ echo '{' '"6.0.21-18":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod6.0",' '"status":' '"recommended"' '},' '"7.0.18-11":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod7.0",' '"status":' '"recommended"' '},' '"8.0.8-3":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod8.0",' '"status":' '"recommended"' '}' '}' ++ jq 'to_entries | sort_by( .key | split("[[:punct:]]";"g") | map(tonumber) ) | map({(.key): .value}) ' + version_matrix='[ { "6.0.21-18": { "image_path": "perconalab/percona-server-mongodb-operator:main-mongod6.0", "status": "recommended" } }, { "7.0.18-11": { "image_path": "perconalab/percona-server-mongodb-operator:main-mongod7.0", "status": "recommended" } }, { "8.0.8-3": { "image_path": "perconalab/percona-server-mongodb-operator:main-mongod8.0", "status": "recommended" } } ]' ++ echo '[' '{' '"6.0.21-18":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod6.0",' '"status":' '"recommended"' '}' '},' '{' '"7.0.18-11":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod7.0",' '"status":' '"recommended"' '}' '},' '{' '"8.0.8-3":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod8.0",' '"status":' '"recommended"' '}' '}' ']' ++ uniq ++ tail -n +2 ++ jq -r '.[] | keys | .[] | split(".") | .[:2] | join(".")' + versions_to_verify='7.0 8.0' + cluster=some-name-rs0 ++ echo '[' '{' '"6.0.21-18":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod6.0",' '"status":' '"recommended"' '}' '},' '{' '"7.0.18-11":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod7.0",' '"status":' '"recommended"' '}' '},' '{' '"8.0.8-3":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod8.0",' '"status":' '"recommended"' '}' '}' ']' ++ jq '.[0] | keys | .[0]' + desc 'Starting the cluster with "6.0.21-18"' + set +o xtrace ----------------------------------------------------------------------------------- Starting the cluster with "6.0.21-18" ----------------------------------------------------------------------------------- ++ echo '[' '{' '"6.0.21-18":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod6.0",' '"status":' '"recommended"' '}' '},' '{' '"7.0.18-11":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod7.0",' '"status":' '"recommended"' '}' '},' '{' '"8.0.8-3":' '{' '"image_path":' '"perconalab/percona-server-mongodb-operator:main-mongod8.0",' '"status":' '"recommended"' '}' '}' ']' ++ jq -r 'to_entries | .[0].value | .[].image_path' + export IMAGE_MONGOD=perconalab/percona-server-mongodb-operator:main-mongod6.0 + IMAGE_MONGOD=perconalab/percona-server-mongodb-operator:main-mongod6.0 + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/mongod-major-upgrade/conf/some-name-rs0.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config 
/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/mongod-major-upgrade/conf/some-name-rs0.yml ++ mktemp + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-1917-cd834df2"' + yq eval '(.spec | select(has("pmm"))).pmm.image = "perconalab/pmm-client:dev-latest"' + local LAST_OUT=/tmp/tmp.RltXYsQBFG + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/mongod-major-upgrade/conf/some-name-rs0.yml + yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' + yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod6.0"' ++ mktemp + local LAST_ERR=/tmp/tmp.2kp4LrcqO0 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + yq eval '.spec.upgradeOptions.apply="Never"' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.RltXYsQBFG perconaservermongodb.psmdb.percona.com/some-name created + cat /tmp/tmp.2kp4LrcqO0 + rm /tmp/tmp.RltXYsQBFG /tmp/tmp.2kp4LrcqO0 + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready...........OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.............OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FFMUcugjDA +++ mktemp ++ local LAST_ERR=/tmp/tmp.RvhAQxlb4v ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.FFMUcugjDA ++ cat /tmp/tmp.RvhAQxlb4v ++ rm /tmp/tmp.FFMUcugjDA /tmp/tmp.RvhAQxlb4v ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready...........OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FccbBNW3Rr +++ mktemp ++ local LAST_ERR=/tmp/tmp.rIZhfBL4Ha ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.FccbBNW3Rr ++ cat /tmp/tmp.rIZhfBL4Ha ++ rm /tmp/tmp.FccbBNW3Rr /tmp/tmp.rIZhfBL4Ha ++ return 0 + [[ '' == \t\r\u\e ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0lTkXxbOkV +++ mktemp ++ local LAST_ERR=/tmp/tmp.RuMN0Qi4bK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for 
i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.0lTkXxbOkV ++ cat /tmp/tmp.RuMN0Qi4bK ++ rm /tmp/tmp.0lTkXxbOkV /tmp/tmp.RuMN0Qi4bK ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + desc 'write data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write data, read from all ----------------------------------------------------------------------------------- + run_mongo 'db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' userAdmin:userAdmin123456@some-name-rs0.mongod-major-upgrade-29555 + local 'command=db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' + local uri=userAdmin:userAdmin123456@some-name-rs0.mongod-major-upgrade-29555 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7IiWJ5H3hH +++ mktemp ++ local LAST_ERR=/tmp/tmp.5QZU3At0qk ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.7IiWJ5H3hH ++ cat /tmp/tmp.5QZU3At0qk ++ rm /tmp/tmp.7IiWJ5H3hH /tmp/tmp.5QZU3At0qk ++ return 0 + local client_container=psmdb-client-66f577db5f-ddjrx + local mongo_flag= + [[ userAdmin:userAdmin123456@some-name-rs0.mongod-major-upgrade-29555 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.vVqVUzVpeX ++ mktemp + local LAST_ERR=/tmp/tmp.7KedgKFsMu + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.vVqVUzVpeX Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("97f03573-3443-47c2-9dcb-eb792a735fe5") } Percona Server for MongoDB server version: v6.0.21-18 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] } bye + cat /tmp/tmp.7KedgKFsMu + rm /tmp/tmp.vVqVUzVpeX /tmp/tmp.7KedgKFsMu + return 0 + sleep 2 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.mongod-major-upgrade-29555 + local 'command=use myApp\n 
db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.mongod-major-upgrade-29555 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZOdyfk62Vd +++ mktemp ++ local LAST_ERR=/tmp/tmp.cUum19ZsyB ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ZOdyfk62Vd ++ cat /tmp/tmp.cUum19ZsyB ++ rm /tmp/tmp.ZOdyfk62Vd /tmp/tmp.cUum19ZsyB ++ return 0 + local client_container=psmdb-client-66f577db5f-ddjrx + local mongo_flag= + [[ myApp:myPass@some-name-rs0.mongod-major-upgrade-29555 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.5ZHbZr8XNu ++ mktemp + local LAST_ERR=/tmp/tmp.Um2foBb9lI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.5ZHbZr8XNu Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("b2d49a8d-3c2c-474b-93b9-94e0b9e0e83c") } Percona Server for MongoDB server version: v6.0.21-18 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.Um2foBb9lI + rm /tmp/tmp.5ZHbZr8XNu /tmp/tmp.Um2foBb9lI + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.mongod-major-upgrade-29555 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.mongod-major-upgrade-29555 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-06-11T15:20:29+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.mongod-major-upgrade-29555 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.mongod-major-upgrade-29555 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MCYqw5d5zq +++ mktemp ++ local LAST_ERR=/tmp/tmp.QgHYyFGQ90 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.MCYqw5d5zq ++ cat /tmp/tmp.QgHYyFGQ90 ++ rm /tmp/tmp.MCYqw5d5zq /tmp/tmp.QgHYyFGQ90 ++ return 0 + local client_container=psmdb-client-66f577db5f-ddjrx + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.mongod-major-upgrade-29555 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.y4T9564Lhm ++ mktemp + local LAST_ERR=/tmp/tmp.tXn3i50a1S + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.y4T9564Lhm + cat /tmp/tmp.tXn3i50a1S + rm /tmp/tmp.y4T9564Lhm /tmp/tmp.tXn3i50a1S + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/mongod-major-upgrade/compare/find.json /tmp/tmp.o3Qe20mAZP/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.mongod-major-upgrade-29555 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.mongod-major-upgrade-29555 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-06-11T15:20:32+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.mongod-major-upgrade-29555 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.mongod-major-upgrade-29555 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.alhiT9EGLK +++ mktemp ++ local LAST_ERR=/tmp/tmp.PXD449SqiT ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.alhiT9EGLK ++ cat /tmp/tmp.PXD449SqiT ++ rm /tmp/tmp.alhiT9EGLK /tmp/tmp.PXD449SqiT ++ return 0 + local client_container=psmdb-client-66f577db5f-ddjrx + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.mongod-major-upgrade-29555 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Msri9vorv5 ++ mktemp + local LAST_ERR=/tmp/tmp.Kx2z8RFsCZ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Msri9vorv5 + cat /tmp/tmp.Kx2z8RFsCZ + rm /tmp/tmp.Msri9vorv5 /tmp/tmp.Kx2z8RFsCZ + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/mongod-major-upgrade/compare/find.json /tmp/tmp.o3Qe20mAZP/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.mongod-major-upgrade-29555 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.mongod-major-upgrade-29555 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-06-11T15:20:35+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.mongod-major-upgrade-29555 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.mongod-major-upgrade-29555 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FJyeo2a3Rx +++ mktemp ++ local LAST_ERR=/tmp/tmp.kyvpAPAULi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.FJyeo2a3Rx ++ cat /tmp/tmp.kyvpAPAULi ++ rm /tmp/tmp.FJyeo2a3Rx /tmp/tmp.kyvpAPAULi ++ return 0 + local client_container=psmdb-client-66f577db5f-ddjrx + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.mongod-major-upgrade-29555 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.6RwHmsNhMn ++ mktemp + local LAST_ERR=/tmp/tmp.6pAZIGpB2j + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6RwHmsNhMn + cat /tmp/tmp.6pAZIGpB2j + rm /tmp/tmp.6RwHmsNhMn /tmp/tmp.6pAZIGpB2j + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/mongod-major-upgrade/compare/find.json /tmp/tmp.o3Qe20mAZP/find + desc 'Starting to follow mongod upgrade images chain' + set +o xtrace ----------------------------------------------------------------------------------- Starting to follow mongod upgrade images chain ----------------------------------------------------------------------------------- + target_generation=2 + for version in '${versions_to_verify[@]}' + desc 'Testing upgrade to version: 7.0' + set +o xtrace ----------------------------------------------------------------------------------- Testing upgrade to version: 7.0 ----------------------------------------------------------------------------------- + kubectl_bin patch psmdb/some-name --type=json '-p=[ {"op":"replace","path":"/spec/upgradeOptions/apply","value": "7.0-recommended"} ]' ++ mktemp + local LAST_OUT=/tmp/tmp.GwL73VXrSm ++ mktemp + local LAST_ERR=/tmp/tmp.9Z0JUIdFaY + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch psmdb/some-name --type=json '-p=[ {"op":"replace","path":"/spec/upgradeOptions/apply","value": "7.0-recommended"} ]' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat 
/tmp/tmp.GwL73VXrSm perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.9Z0JUIdFaY + rm /tmp/tmp.GwL73VXrSm /tmp/tmp.9Z0JUIdFaY + return 0 + sleep 70 + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wwwvuD3vya +++ mktemp ++ local LAST_ERR=/tmp/tmp.kCE54Y4zm0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.wwwvuD3vya ++ cat /tmp/tmp.kCE54Y4zm0 ++ rm /tmp/tmp.wwwvuD3vya /tmp/tmp.kCE54Y4zm0 ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rfmvru86qA +++ mktemp ++ local LAST_ERR=/tmp/tmp.XnlQl53TVh ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.rfmvru86qA ++ cat /tmp/tmp.XnlQl53TVh ++ rm /tmp/tmp.rfmvru86qA /tmp/tmp.XnlQl53TVh ++ return 0 + [[ '' == \t\r\u\e ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qlaKGGVAQS +++ mktemp ++ local LAST_ERR=/tmp/tmp.UXgYTls7rc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.qlaKGGVAQS ++ cat /tmp/tmp.UXgYTls7rc ++ rm /tmp/tmp.qlaKGGVAQS /tmp/tmp.UXgYTls7rc ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness............................................ 
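The step above drives the major upgrade declaratively: the CR's upgradeOptions.apply is pointed at the "7.0-recommended" channel, the operator pulls the matching image from the version service deployed earlier, and the harness waits for the rolled pods and then for .status.state. A minimal sketch of that pattern, assuming this run's cluster name and namespace (the polling loop stands in for the test's wait_cluster_consistency helper, whose body is not shown in this log):

# Point the CR at the 7.0 recommended channel; the operator resolves the image via the version service.
kubectl patch psmdb/some-name --type=json \
  -p='[{"op":"replace","path":"/spec/upgradeOptions/apply","value":"7.0-recommended"}]'
# Poll the custom resource until the operator reports the cluster ready again.
until [ "$(kubectl get psmdb some-name -o 'jsonpath={.status.state}')" = "ready" ]; do
  sleep 5
done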
+ wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cE8KYSR4ly +++ mktemp ++ local LAST_ERR=/tmp/tmp.vq18jOAZCV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.cE8KYSR4ly ++ cat /tmp/tmp.vq18jOAZCV ++ rm /tmp/tmp.cE8KYSR4ly /tmp/tmp.vq18jOAZCV ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + compare_generation 2 statefulset some-name-rs0 + local generation=2 + local resource_type=statefulset + local resource_name=some-name-rs0 + local current_generation ++ kubectl_bin get statefulset some-name-rs0 -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UICpPbmnfp +++ mktemp ++ local LAST_ERR=/tmp/tmp.1V9OLVqcPX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-rs0 -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.UICpPbmnfp ++ cat /tmp/tmp.1V9OLVqcPX ++ rm /tmp/tmp.UICpPbmnfp /tmp/tmp.1V9OLVqcPX ++ return 0 + current_generation=2 + [[ 2 != \2 ]] + kubectl_bin patch psmdb/some-name --type=json '-p=[ {"op":"replace","path":"/spec/upgradeOptions/setFCV","value": true} ]' ++ mktemp + local LAST_OUT=/tmp/tmp.zyuhz8Pf9Q ++ mktemp + local LAST_ERR=/tmp/tmp.FO9xGHqBlj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch psmdb/some-name --type=json '-p=[ {"op":"replace","path":"/spec/upgradeOptions/setFCV","value": true} ]' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.zyuhz8Pf9Q perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.FO9xGHqBlj + rm /tmp/tmp.zyuhz8Pf9Q /tmp/tmp.FO9xGHqBlj + return 0 + sleep 10 + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lWj71Yz7Vs +++ mktemp ++ local LAST_ERR=/tmp/tmp.GDhgNMo3fK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.lWj71Yz7Vs ++ cat /tmp/tmp.GDhgNMo3fK ++ rm /tmp/tmp.lWj71Yz7Vs /tmp/tmp.GDhgNMo3fK ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo ++ run_mongos 'JSON.stringify(db.adminCommand({getParameter:1,featureCompatibilityVersion:1}))' clusterAdmin:clusterAdmin123456@some-name-rs0.mongod-major-upgrade-29555 ++ local 'command=JSON.stringify(db.adminCommand({getParameter:1,featureCompatibilityVersion:1}))' ++ local uri=clusterAdmin:clusterAdmin123456@some-name-rs0.mongod-major-upgrade-29555 ++ local driver=mongodb ++ grep -E '^\{.*\}$' ++ local suffix=.svc.cluster.local ++ local mongo_flag= ++ local port=27017 ++ local mongo_bin=mongo ++ jq -r .featureCompatibilityVersion.version +++ echo .svc.cluster.local +++ awk -F: '{print $2}' ++ suffix_port= ++ [[ -z '' ]] ++ suffix=.svc.cluster.local:27017 +++ kubectl_bin get pods --selector=name=psmdb-client -o 
'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.N0Ca3100jw ++++ mktemp +++ local LAST_ERR=/tmp/tmp.FTrNXqYAjV +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.N0Ca3100jw +++ cat /tmp/tmp.FTrNXqYAjV +++ rm /tmp/tmp.N0Ca3100jw /tmp/tmp.FTrNXqYAjV +++ return 0 ++ local client_container=psmdb-client-66f577db5f-ddjrx ++ kubectl_bin exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''JSON.stringify(db.adminCommand({getParameter:1,featureCompatibilityVersion:1}))\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local:27017/admin ' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5qtxh0vzqw +++ mktemp ++ local LAST_ERR=/tmp/tmp.nQeje9UWPU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''JSON.stringify(db.adminCommand({getParameter:1,featureCompatibilityVersion:1}))\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local:27017/admin ' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.5qtxh0vzqw ++ cat /tmp/tmp.nQeje9UWPU ++ rm /tmp/tmp.5qtxh0vzqw /tmp/tmp.nQeje9UWPU ++ return 0 + currentFCV=7.0 + [[ 7.0 != 7.0 ]] + run_mongo 'use myApp\n db.test.insert({ x: 100502 })' myApp:myPass@some-name-rs0.mongod-major-upgrade-29555 + local 'command=use myApp\n db.test.insert({ x: 100502 })' + local uri=myApp:myPass@some-name-rs0.mongod-major-upgrade-29555 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zplQruAzot +++ mktemp ++ local LAST_ERR=/tmp/tmp.liEZ4nFUWe ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.zplQruAzot ++ cat /tmp/tmp.liEZ4nFUWe ++ rm /tmp/tmp.zplQruAzot /tmp/tmp.liEZ4nFUWe ++ return 0 + local client_container=psmdb-client-66f577db5f-ddjrx + local mongo_flag= + [[ myApp:myPass@some-name-rs0.mongod-major-upgrade-29555 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100502 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.9K9A5rK6P9 ++ mktemp + local LAST_ERR=/tmp/tmp.E3ULWdC0dP + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100502 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9K9A5rK6P9 Percona Server for MongoDB shell version v4.4.29-28 connecting to: 
mongodb://some-name-rs0-0.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("d4dc92d7-65eb-4cdb-b4ba-19b51bc11316") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.E3ULWdC0dP + rm /tmp/tmp.9K9A5rK6P9 /tmp/tmp.E3ULWdC0dP + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.mongod-major-upgrade-29555 -2 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.mongod-major-upgrade-29555 + local postfix=-2 + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-06-11T15:24:28+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.mongod-major-upgrade-29555 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.mongod-major-upgrade-29555 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kmhndUb6vN +++ mktemp ++ local LAST_ERR=/tmp/tmp.UOftyiiA8X ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.kmhndUb6vN ++ cat /tmp/tmp.UOftyiiA8X ++ rm /tmp/tmp.kmhndUb6vN /tmp/tmp.UOftyiiA8X ++ return 0 + local client_container=psmdb-client-66f577db5f-ddjrx + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.mongod-major-upgrade-29555 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.V5zk1GczDe ++ mktemp + local LAST_ERR=/tmp/tmp.qWN9uVvIA7 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.V5zk1GczDe + cat /tmp/tmp.qWN9uVvIA7 + rm /tmp/tmp.V5zk1GczDe /tmp/tmp.qWN9uVvIA7 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/mongod-major-upgrade/compare/find-2.json /tmp/tmp.o3Qe20mAZP/find-2 + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.mongod-major-upgrade-29555 -2 + local command=find + local 
uri=myApp:myPass@some-name-rs0-1.some-name-rs0.mongod-major-upgrade-29555 + local postfix=-2 + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-06-11T15:24:30+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.mongod-major-upgrade-29555 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.mongod-major-upgrade-29555 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local LAST_OUT=/tmp/tmp.gXNt9TXMkp +++ mktemp ++ local LAST_ERR=/tmp/tmp.DfNfJmMiPQ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.gXNt9TXMkp ++ cat /tmp/tmp.DfNfJmMiPQ ++ rm /tmp/tmp.gXNt9TXMkp /tmp/tmp.DfNfJmMiPQ ++ return 0 + local client_container=psmdb-client-66f577db5f-ddjrx + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.mongod-major-upgrade-29555 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.xL96JXyDM9 ++ mktemp + local LAST_ERR=/tmp/tmp.qlOxtW03HM + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.xL96JXyDM9 + cat /tmp/tmp.qlOxtW03HM + rm /tmp/tmp.xL96JXyDM9 /tmp/tmp.qlOxtW03HM + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/mongod-major-upgrade/compare/find-2.json /tmp/tmp.o3Qe20mAZP/find-2 + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.mongod-major-upgrade-29555 -2 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.mongod-major-upgrade-29555 + local postfix=-2 + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-06-11T15:24:32+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.mongod-major-upgrade-29555 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.mongod-major-upgrade-29555 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KAN1stihwf +++ mktemp ++ local LAST_ERR=/tmp/tmp.drKuZRr7eb ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.KAN1stihwf ++ cat /tmp/tmp.drKuZRr7eb ++ rm /tmp/tmp.KAN1stihwf /tmp/tmp.drKuZRr7eb ++ return 0 + local client_container=psmdb-client-66f577db5f-ddjrx + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.mongod-major-upgrade-29555 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.icZ1HVPffk ++ mktemp + local LAST_ERR=/tmp/tmp.PXp7SpsGiU + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.icZ1HVPffk + cat /tmp/tmp.PXp7SpsGiU + rm /tmp/tmp.icZ1HVPffk /tmp/tmp.PXp7SpsGiU + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/mongod-major-upgrade/compare/find-2.json /tmp/tmp.o3Qe20mAZP/find-2 + target_generation=3 + for version in '${versions_to_verify[@]}' + desc 'Testing upgrade to version: 8.0' + set +o xtrace ----------------------------------------------------------------------------------- Testing upgrade to version: 8.0 ----------------------------------------------------------------------------------- + kubectl_bin patch psmdb/some-name --type=json '-p=[ {"op":"replace","path":"/spec/upgradeOptions/apply","value": "8.0-recommended"} ]' ++ mktemp + local LAST_OUT=/tmp/tmp.DoRSS5YKKz ++ mktemp + local LAST_ERR=/tmp/tmp.WO3q6codIi + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch psmdb/some-name --type=json '-p=[ {"op":"replace","path":"/spec/upgradeOptions/apply","value": "8.0-recommended"} ]' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DoRSS5YKKz perconaservermongodb.psmdb.percona.com/some-name patched + cat /tmp/tmp.WO3q6codIi + rm /tmp/tmp.DoRSS5YKKz /tmp/tmp.WO3q6codIi + return 0 + sleep 70 + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + 
set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready......................OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PMke8wFgM1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Wp7lLVVOSK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.PMke8wFgM1 ++ cat /tmp/tmp.Wp7lLVVOSK ++ rm /tmp/tmp.PMke8wFgM1 /tmp/tmp.Wp7lLVVOSK ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.I9bHhwSVO7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.oGrKND34hy ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.I9bHhwSVO7 ++ cat /tmp/tmp.oGrKND34hy ++ rm /tmp/tmp.I9bHhwSVO7 /tmp/tmp.oGrKND34hy ++ return 0 + [[ '' == \t\r\u\e ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DJ7vtSEjHo +++ mktemp ++ local LAST_ERR=/tmp/tmp.6fXypFHVwA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.DJ7vtSEjHo ++ cat /tmp/tmp.6fXypFHVwA ++ rm /tmp/tmp.DJ7vtSEjHo /tmp/tmp.6fXypFHVwA ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness................. 
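The trace below repeats the post-upgrade verification for 8.0: upgradeOptions.setFCV is switched on, then featureCompatibilityVersion is read back through the psmdb-client pod and compared against the target major version. A minimal sketch of that check, reusing the pod selector, credentials, and jq filter seen in this log (these are the run's test fixtures, not production values):

# Ask the operator to bump the feature compatibility version once the binaries are upgraded.
kubectl patch psmdb/some-name --type=json \
  -p='[{"op":"replace","path":"/spec/upgradeOptions/setFCV","value":true}]'
# Read FCV back via the client pod; the harness expects it to match the new major version, here "8.0".
client=$(kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}')
kubectl exec "$client" -- bash -c \
  'printf "JSON.stringify(db.adminCommand({getParameter:1,featureCompatibilityVersion:1}))\n" | mongo "mongodb://clusterAdmin:clusterAdmin123456@some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local:27017/admin"' \
  | grep -E '^\{.*\}$' | jq -r .featureCompatibilityVersion.version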
+ wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rhIBSSF5Cz +++ mktemp ++ local LAST_ERR=/tmp/tmp.oRYoCOqwSK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.rhIBSSF5Cz ++ cat /tmp/tmp.oRYoCOqwSK ++ rm /tmp/tmp.rhIBSSF5Cz /tmp/tmp.oRYoCOqwSK ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + compare_generation 3 statefulset some-name-rs0 + local generation=3 + local resource_type=statefulset + local resource_name=some-name-rs0 + local current_generation ++ kubectl_bin get statefulset some-name-rs0 -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.N2riLI3abb +++ mktemp ++ local LAST_ERR=/tmp/tmp.ElCzzzfWXm ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-rs0 -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.N2riLI3abb ++ cat /tmp/tmp.ElCzzzfWXm ++ rm /tmp/tmp.N2riLI3abb /tmp/tmp.ElCzzzfWXm ++ return 0 + current_generation=3 + [[ 3 != \3 ]] + kubectl_bin patch psmdb/some-name --type=json '-p=[ {"op":"replace","path":"/spec/upgradeOptions/setFCV","value": true} ]' ++ mktemp + local LAST_OUT=/tmp/tmp.nX0NPjZLzh ++ mktemp + local LAST_ERR=/tmp/tmp.ICvCOtWH9z + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch psmdb/some-name --type=json '-p=[ {"op":"replace","path":"/spec/upgradeOptions/setFCV","value": true} ]' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.nX0NPjZLzh perconaservermongodb.psmdb.percona.com/some-name patched (no change) + cat /tmp/tmp.ICvCOtWH9z + rm /tmp/tmp.nX0NPjZLzh /tmp/tmp.ICvCOtWH9z + return 0 + sleep 10 + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LuwRUavinn +++ mktemp ++ local LAST_ERR=/tmp/tmp.Eb3tAA5hFu ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LuwRUavinn ++ cat /tmp/tmp.Eb3tAA5hFu ++ rm /tmp/tmp.LuwRUavinn /tmp/tmp.Eb3tAA5hFu ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo ++ run_mongos 'JSON.stringify(db.adminCommand({getParameter:1,featureCompatibilityVersion:1}))' clusterAdmin:clusterAdmin123456@some-name-rs0.mongod-major-upgrade-29555 ++ local 'command=JSON.stringify(db.adminCommand({getParameter:1,featureCompatibilityVersion:1}))' ++ grep -E '^\{.*\}$' ++ local uri=clusterAdmin:clusterAdmin123456@some-name-rs0.mongod-major-upgrade-29555 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ local mongo_flag= ++ local port=27017 ++ local mongo_bin=mongo ++ jq -r .featureCompatibilityVersion.version +++ echo .svc.cluster.local +++ awk -F: '{print $2}' ++ suffix_port= ++ [[ -z '' ]] ++ suffix=.svc.cluster.local:27017 +++ kubectl_bin get pods --selector=name=psmdb-client -o 
'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.sExoaeFBHI ++++ mktemp +++ local LAST_ERR=/tmp/tmp.G1BbAlpxaC +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.sExoaeFBHI +++ cat /tmp/tmp.G1BbAlpxaC +++ rm /tmp/tmp.sExoaeFBHI /tmp/tmp.G1BbAlpxaC +++ return 0 ++ local client_container=psmdb-client-66f577db5f-ddjrx ++ kubectl_bin exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''JSON.stringify(db.adminCommand({getParameter:1,featureCompatibilityVersion:1}))\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local:27017/admin ' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nDvSkst4SJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.P2LNTOK0ej ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''JSON.stringify(db.adminCommand({getParameter:1,featureCompatibilityVersion:1}))\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local:27017/admin ' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.nDvSkst4SJ ++ cat /tmp/tmp.P2LNTOK0ej ++ rm /tmp/tmp.nDvSkst4SJ /tmp/tmp.P2LNTOK0ej ++ return 0 + currentFCV=8.0 + [[ 8.0 != 8.0 ]] + run_mongo 'use myApp\n db.test.insert({ x: 100503 })' myApp:myPass@some-name-rs0.mongod-major-upgrade-29555 + local 'command=use myApp\n db.test.insert({ x: 100503 })' + local uri=myApp:myPass@some-name-rs0.mongod-major-upgrade-29555 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ibEyiIZ5jh +++ mktemp ++ local LAST_ERR=/tmp/tmp.zWCGgepN0Q ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ibEyiIZ5jh ++ cat /tmp/tmp.zWCGgepN0Q ++ rm /tmp/tmp.ibEyiIZ5jh /tmp/tmp.zWCGgepN0Q ++ return 0 + local client_container=psmdb-client-66f577db5f-ddjrx + local mongo_flag= + [[ myApp:myPass@some-name-rs0.mongod-major-upgrade-29555 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100503 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.AqGa77gwjt ++ mktemp + local LAST_ERR=/tmp/tmp.iZjrWglECU + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100503 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.AqGa77gwjt Percona Server for MongoDB shell version v4.4.29-28 connecting to: 
mongodb://some-name-rs0-1.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("52334c09-612a-4511-a748-b09b5b29165a") } Percona Server for MongoDB server version: v8.0.8-3 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.iZjrWglECU + rm /tmp/tmp.AqGa77gwjt /tmp/tmp.iZjrWglECU + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.mongod-major-upgrade-29555 -3 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.mongod-major-upgrade-29555 + local postfix=-3 + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-06-11T15:27:53+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.mongod-major-upgrade-29555 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.mongod-major-upgrade-29555 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local LAST_OUT=/tmp/tmp.8bajNREkzZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.tRqYzxvZjt ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8bajNREkzZ ++ cat /tmp/tmp.tRqYzxvZjt ++ rm /tmp/tmp.8bajNREkzZ /tmp/tmp.tRqYzxvZjt ++ return 0 + local client_container=psmdb-client-66f577db5f-ddjrx + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.mongod-major-upgrade-29555 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.tz309DwMHJ ++ mktemp + local LAST_ERR=/tmp/tmp.cazaDxqwQr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.tz309DwMHJ + cat /tmp/tmp.cazaDxqwQr + rm /tmp/tmp.tz309DwMHJ /tmp/tmp.cazaDxqwQr + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/mongod-major-upgrade/compare/find-3.json /tmp/tmp.o3Qe20mAZP/find-3 + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.mongod-major-upgrade-29555 -3 + local command=find + local 
uri=myApp:myPass@some-name-rs0-1.some-name-rs0.mongod-major-upgrade-29555 + local postfix=-3 + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-06-11T15:27:56+0000] running db.test.find() in myApp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.mongod-major-upgrade-29555 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.mongod-major-upgrade-29555 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AAeMZ4VefW +++ mktemp ++ local LAST_ERR=/tmp/tmp.qBxHCpMXrI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.AAeMZ4VefW ++ cat /tmp/tmp.qBxHCpMXrI ++ rm /tmp/tmp.AAeMZ4VefW /tmp/tmp.qBxHCpMXrI ++ return 0 + local client_container=psmdb-client-66f577db5f-ddjrx + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.mongod-major-upgrade-29555 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.REf0Ew8cM2 ++ mktemp + local LAST_ERR=/tmp/tmp.rjeYC9gjLI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.REf0Ew8cM2 + cat /tmp/tmp.rjeYC9gjLI + rm /tmp/tmp.REf0Ew8cM2 /tmp/tmp.rjeYC9gjLI + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/mongod-major-upgrade/compare/find-3.json /tmp/tmp.o3Qe20mAZP/find-3 + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.mongod-major-upgrade-29555 -3 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.mongod-major-upgrade-29555 + local postfix=-3 + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-06-11T15:27:59+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.mongod-major-upgrade-29555 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.mongod-major-upgrade-29555 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ local LAST_OUT=/tmp/tmp.So4jl3ulLm +++ mktemp ++ local LAST_ERR=/tmp/tmp.pjeHAWBIkG ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.So4jl3ulLm ++ cat /tmp/tmp.pjeHAWBIkG ++ rm /tmp/tmp.So4jl3ulLm /tmp/tmp.pjeHAWBIkG ++ return 0 + local client_container=psmdb-client-66f577db5f-ddjrx + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.mongod-major-upgrade-29555 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Od51LfSv0l ++ mktemp + local LAST_ERR=/tmp/tmp.2FXUnRclV2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-ddjrx -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.mongod-major-upgrade-29555.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Od51LfSv0l + cat /tmp/tmp.2FXUnRclV2 + rm /tmp/tmp.Od51LfSv0l /tmp/tmp.2FXUnRclV2 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/e2e-tests/mongod-major-upgrade/compare/find-3.json /tmp/tmp.o3Qe20mAZP/find-3 + target_generation=4 + destroy mongod-major-upgrade-29555 + local namespace=mongod-major-upgrade-29555 + local ignore_logs=true + [[ 0 == 1 ]] + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false ']' + delete_backups + desc 'Delete psmdb-backup' + set +o xtrace ----------------------------------------------------------------------------------- Delete psmdb-backup ----------------------------------------------------------------------------------- ++ kubectl_bin get psmdb-backup --no-headers ++ wc -l +++ mktemp ++ local LAST_OUT=/tmp/tmp.tsKOxCuFCy +++ mktemp ++ local LAST_ERR=/tmp/tmp.eTUSkz6Rf7 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup --no-headers ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.tsKOxCuFCy ++ cat /tmp/tmp.eTUSkz6Rf7 No resources found 
in mongod-major-upgrade-29555 namespace. ++ rm /tmp/tmp.tsKOxCuFCy /tmp/tmp.eTUSkz6Rf7 ++ return 0 + '[' 0 '!=' 0 ']' + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.xTmrgRGbnM ++ mktemp + local LAST_ERR=/tmp/tmp.tazuLp2o90 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.xTmrgRGbnM customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.tazuLp2o90 + rm /tmp/tmp.xTmrgRGbnM /tmp/tmp.tazuLp2o90 + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/deploy/crd.yaml ++ grep -v '\-\-\-' + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.N5RrTdSent ++ mktemp + local LAST_ERR=/tmp/tmp.rKB7h6FnG8 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.N5RrTdSent + cat /tmp/tmp.rKB7h6FnG8 + rm /tmp/tmp.N5RrTdSent /tmp/tmp.rKB7h6FnG8 + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.Ezkei7y3cg ++ mktemp + local LAST_ERR=/tmp/tmp.rYP3YNgBkD + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a 
-n 1 ']' + break + cat /tmp/tmp.Ezkei7y3cg + cat /tmp/tmp.rYP3YNgBkD + rm /tmp/tmp.Ezkei7y3cg /tmp/tmp.rYP3YNgBkD + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.4c5CkIAzHn ++ mktemp + local LAST_ERR=/tmp/tmp.poEDsvh1Ef + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.4c5CkIAzHn + cat /tmp/tmp.poEDsvh1Ef + rm /tmp/tmp.4c5CkIAzHn /tmp/tmp.poEDsvh1Ef + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.wO37RN94Ks ++ mktemp + local LAST_ERR=/tmp/tmp.dkbIlY4wCR + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1917/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.wO37RN94Ks clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.dkbIlY4wCR + rm /tmp/tmp.wO37RN94Ks /tmp/tmp.dkbIlY4wCR + return 0 + destroy_cert_manager + kubectl_bin delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.BUhKuq8tZ8 ++ mktemp + local LAST_ERR=/tmp/tmp.CtbV1nZuLA + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.BUhKuq8tZ8 + cat /tmp/tmp.CtbV1nZuLA Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.BUhKuq8tZ8 + cat /tmp/tmp.CtbV1nZuLA Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not 
found [... the remaining "Error from server (NotFound)" messages are identical to the first attempt above ...]
+ sleep 4 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.BUhKuq8tZ8 + cat /tmp/tmp.CtbV1nZuLA [... same "Error from server (NotFound)" output as the first attempt ...]
+ sleep 8 + cat /tmp/tmp.BUhKuq8tZ8 + cat /tmp/tmp.CtbV1nZuLA [... same "Error from server (NotFound)" output as the first attempt ...]
+ rm /tmp/tmp.BUhKuq8tZ8 /tmp/tmp.CtbV1nZuLA + return 1 + true + '[' -n '' ']' + '[' -n psmdb-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace mongod-major-upgrade-29555 + rm -rf /tmp/tmp.o3Qe20mAZP + kubectl_bin delete --grace-period=0 --force=true namespace psmdb-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.QP44oeugR0 + local LAST_OUT=/tmp/tmp.HA0FdCVl3D ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.1tQHwmhfLh + local exit_status=0 + local timeout=4 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.4lYVmATxFa + for i in '$(seq 0 2)' + local exit_status=0 + set +e + local timeout=4 + kubectl delete --grace-period=0 --force=true namespace mongod-major-upgrade-29555 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace psmdb-operator
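For readers following the trace: each find()/diff step earlier in this log follows one fixed pattern — run db.test.find() through the psmdb-client pod, strip volatile output (ObjectIds, pod ordinals, shell noise), then diff against a golden file. A minimal sketch of that pattern, assuming hypothetical $tmp_dir/$test_dir variable names (the real helpers are compare_mongo_cmd and run_mongo in the e2e-tests framework):

    # Sketch only: normalize mongo shell output and compare to the expected file.
    # $tmp_dir and $test_dir are illustrative names, not the framework's own.
    run_mongo 'use myApp\n db.test.find()' "$uri" mongodb '' \
        | egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match' \
        | /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' \
        > "$tmp_dir/find-3"
    # Exit status of diff is the pass/fail signal for this check.
    diff -u "$test_dir/compare/find-3.json" "$tmp_dir/find-3"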
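Every kubectl_bin call in this log expands to the same retry scaffolding: two mktemp files for stdout/stderr, up to three attempts, and a growing sleep (0s, 4s, 8s) between failures. A minimal reconstruction inferred from the trace alone — the real function lives in the e2e-tests helper library and may differ in details:

    # Sketch of the retry wrapper suggested by the trace; names are assumptions.
    kubectl_bin() {
        local LAST_OUT LAST_ERR
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        local exit_status=0
        local timeout=4
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ $exit_status = 0 ]; then
                break
            fi
            cat "$LAST_OUT"
            cat "$LAST_ERR"
            sleep $((timeout * i))   # 0s, 4s, 8s, matching the trace above
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR"
        rm "$LAST_OUT" "$LAST_ERR"
        return $exit_status
    }

This is why the failed cert-manager delete above prints its NotFound errors once per attempt and then once more after the loop before `return 1`.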
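The delete_crd phases above repeat one idiom per CRD: clear finalizers on any stranded custom resources so deletion cannot hang, then wait for the CRD itself to disappear. Sketched below from the trace (assumed shape; the trailing no-op mirrors the `+ :` that swallows the expected "server doesn't have a resource type" errors on an already-clean cluster):

    # For each CRD named in deploy/crd.yaml, strip finalizers from leftover
    # objects in every namespace, then block until the CRD is gone.
    for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-'); do
        kubectl get "$crd_name" --all-namespaces -o wide \
            | grep -v NAMESPACE \
            | xargs -L 1 sh -xc "kubectl patch $crd_name -n \$0 \$1 --type=merge -p '{\"metadata\":{\"finalizers\":[]}}'" \
            || :
        kubectl wait --for=delete crd "$crd_name"
    done

Here xargs feeds each "namespace name ..." line to sh, so $0 is the namespace and $1 the resource name; extra -o wide columns are simply ignored.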