Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/logs/pitr.log
grep: warning: stray \ before -
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
+ main
+ create_infra pitr-13298
+ local ns=pitr-13298
+ [[ 1 == 1 ]]
+ delete_crd
+ desc 'get and delete old CRDs and RBAC'
+ set +o xtrace
-----------------------------------------------------------------------------------
get and delete old CRDs and RBAC
-----------------------------------------------------------------------------------
+ kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/crd.yaml --ignore-not-found --wait=false
++ mktemp
+ local LAST_OUT=/tmp/tmp.W5U6tWsg0W
++ mktemp
+ local LAST_ERR=/tmp/tmp.lNrTzp6Ang
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/crd.yaml --ignore-not-found --wait=false
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.W5U6tWsg0W
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted
+ cat /tmp/tmp.lNrTzp6Ang
+ rm /tmp/tmp.W5U6tWsg0W /tmp/tmp.lNrTzp6Ang
+ return 0
++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/crd.yaml
++ grep -v '\-\-\-'
grep: warning: stray \ before -
grep: warning: stray \ before -
+ for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-')
+ kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
error: the server doesn't have a resource type "perconaservermongodbbackups"
+ kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbbackups"
+ :
+ kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com
++ mktemp
+ local LAST_OUT=/tmp/tmp.RQVGzt3PvS
++ mktemp
+ local LAST_ERR=/tmp/tmp.RrWgFJ8d2y
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.RQVGzt3PvS
+ cat /tmp/tmp.RrWgFJ8d2y
+ rm /tmp/tmp.RQVGzt3PvS /tmp/tmp.RrWgFJ8d2y
+ return 0
+ for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-')
+ kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
error: the server doesn't have a resource type "perconaservermongodbrestores"
+ kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbrestores"
+ :
+ kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com
++ mktemp
+ local LAST_OUT=/tmp/tmp.6Bt8euue4b
++ mktemp
+ local LAST_ERR=/tmp/tmp.fSuCwI1mPC
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.6Bt8euue4b
+ cat /tmp/tmp.fSuCwI1mPC
+ rm /tmp/tmp.6Bt8euue4b /tmp/tmp.fSuCwI1mPC
+ return 0
+ for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-')
+ kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
error: the server doesn't have a resource type "perconaservermongodbs"
+ kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbs"
+ :
+ kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com
++ mktemp
+ local LAST_OUT=/tmp/tmp.IlhQJVvXY9
++ mktemp
+ local LAST_ERR=/tmp/tmp.nviFM23msc
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.IlhQJVvXY9
+ cat /tmp/tmp.nviFM23msc
+ rm /tmp/tmp.IlhQJVvXY9 /tmp/tmp.nviFM23msc
+ return 0
+ local rbac_yaml=rbac.yaml
+ '[' -n psmdb-operator ']'
+ rbac_yaml=cw-rbac.yaml
+ kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/cw-rbac.yaml --ignore-not-found
++ mktemp
+ local LAST_OUT=/tmp/tmp.f1V7N66PQe
++ mktemp
+ local LAST_ERR=/tmp/tmp.NTCr6drW1k
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/cw-rbac.yaml --ignore-not-found
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.f1V7N66PQe
clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted
clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted
+ cat /tmp/tmp.NTCr6drW1k
+ rm /tmp/tmp.f1V7N66PQe /tmp/tmp.NTCr6drW1k
+ return 0
+ check_crd_for_deletion PR-2125-8ebcb80f
+ local git_tag=PR-2125-8ebcb80f
++ curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/PR-2125-8ebcb80f/deploy/crd.yaml
++ yq eval .metadata.name
++ /usr/sbin/sed s/---//g
++ /usr/sbin/sed ':a;N;$!ba;s/\n/ /g'
+ for crd_name in $(curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml | yq eval '.metadata.name' | $sed 's/---//g' | $sed ':a;N;$!ba;s/\n/ /g')
++ kubectl_bin get crd/null -o 'jsonpath={.status.conditions[-1].type}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.HokXHuRVF9
+++ mktemp
++ local LAST_ERR=/tmp/tmp.6IS7gohohR
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}'
++ exit_status=1
++ set -e
++ '[' 1 '!=' 0 -a -n 1 ']'
++ cat /tmp/tmp.HokXHuRVF9
++ cat /tmp/tmp.6IS7gohohR
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found
++ sleep 0
++ for i in $(seq 0 2)
++ set +e
++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}'
++ exit_status=1
++ set -e
++ '[' 1 '!=' 0 -a -n 1 ']'
++ cat /tmp/tmp.HokXHuRVF9
++ cat /tmp/tmp.6IS7gohohR
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found
++ sleep 4
++ for i in $(seq 0 2)
++ set +e
++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}'
++ exit_status=1
++ set -e
++ '[' 1 '!=' 0 -a -n 1 ']'
++ cat /tmp/tmp.HokXHuRVF9
++ cat /tmp/tmp.6IS7gohohR
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found
++ sleep 8
++ cat /tmp/tmp.HokXHuRVF9
++ cat /tmp/tmp.6IS7gohohR
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found
++ rm /tmp/tmp.HokXHuRVF9 /tmp/tmp.6IS7gohohR
++ return 1
+ [[ '' == \T\e\r\m\i\n\a\t\i\n\g ]]
+ '[' -n psmdb-operator ']'
+ create_namespace psmdb-operator
+ local namespace=psmdb-operator
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ tail -n1
++ awk '-F ' '{print $2}'
++ sed s/NAMESPACE//
+ local chaos_mesh_ns=
+ desc 'destroy chaos-mesh'
+ set +o xtrace
-----------------------------------------------------------------------------------
destroy chaos-mesh
-----------------------------------------------------------------------------------
+ '[' -n '' ']'
++ kubectl get MutatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ grep validate-auth
++ kubectl get ValidatingWebhookConfiguration
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl api-resources
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get crd
++ grep chaos-mesh.org
++ awk '{print $1}'
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrole
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ kubectl_bin get ns
+ egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME'
+ awk '{print$1}'
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces psmdb-operator'
+ xargs kubectl delete ns
+ set +o xtrace
++ mktemp
-----------------------------------------------------------------------------------
cleaned up old namespaces psmdb-operator
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace psmdb-operator --ignore-not-found
++ mktemp
+ local LAST_OUT=/tmp/tmp.nuMI3cXizv
egrep: warning: egrep is obsolescent; using grep -E
++ mktemp
+ local LAST_OUT=/tmp/tmp.KC2J3BaJCN
++ mktemp
+ local LAST_ERR=/tmp/tmp.ERowy1QiyM
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ local LAST_ERR=/tmp/tmp.EXY28HpjFH
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl get ns
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete namespace psmdb-operator --ignore-not-found
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.nuMI3cXizv
+ cat /tmp/tmp.ERowy1QiyM
+ rm /tmp/tmp.nuMI3cXizv /tmp/tmp.ERowy1QiyM
+ return 0
namespace "pitr-705" deleted
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.KC2J3BaJCN
namespace "psmdb-operator" deleted
+ cat /tmp/tmp.EXY28HpjFH
+ rm /tmp/tmp.KC2J3BaJCN /tmp/tmp.EXY28HpjFH
+ return 0
+ kubectl_bin wait --for=delete namespace psmdb-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.vHQ04tfj1U
++ mktemp
+ local LAST_ERR=/tmp/tmp.UmKdeTVOWp
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl wait --for=delete namespace psmdb-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.vHQ04tfj1U
+ cat /tmp/tmp.UmKdeTVOWp
+ rm /tmp/tmp.vHQ04tfj1U /tmp/tmp.UmKdeTVOWp
+ return 0
+ desc 'create namespace psmdb-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace psmdb-operator
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace psmdb-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.RLWtKPzCaR
++ mktemp
+ local LAST_ERR=/tmp/tmp.JwPyPudpBF
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl create namespace psmdb-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.RLWtKPzCaR
namespace/psmdb-operator created
+ cat /tmp/tmp.JwPyPudpBF
+ rm /tmp/tmp.RLWtKPzCaR /tmp/tmp.JwPyPudpBF
+ return 0
+ set_kube_ctx psmdb-operator
+ local namespace=psmdb-operator
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.zdy4hxg8Mm
+++ mktemp
++ local LAST_ERR=/tmp/tmp.fxodAW1LLu
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.zdy4hxg8Mm
++ cat /tmp/tmp.fxodAW1LLu
++ rm /tmp/tmp.zdy4hxg8Mm /tmp/tmp.fxodAW1LLu
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2125-8ebcb80f-3-cluster9 --namespace=psmdb-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.nLk5TRLk6e
++ mktemp
+ local LAST_ERR=/tmp/tmp.dwog2rftlj
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2125-8ebcb80f-3-cluster9 --namespace=psmdb-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.nLk5TRLk6e
Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-2125-8ebcb80f-3-cluster9" modified.
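Every kubectl invocation in this trace goes through the suite's kubectl_bin retry wrapper, whose expansion accounts for the repeated mktemp/LAST_OUT/LAST_ERR/seq lines above. A minimal sketch of that wrapper, reconstructed from the trace alone (the real helper in the repo's e2e-tests functions may differ; the literal '-n 1' test operand and the backoff arithmetic are assumptions inferred from the observed sleep 0/4/8 sequence):

kubectl_bin() {
	local LAST_OUT LAST_ERR exit_status=0 timeout=4
	LAST_OUT=$(mktemp)
	LAST_ERR=$(mktemp)
	for i in $(seq 0 2); do
		set +e
		kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
		exit_status=$?
		set -e
		if [ "$exit_status" != 0 -a -n 1 ]; then
			# failed attempt: surface the captured streams, back off, retry
			cat "$LAST_OUT"
			cat "$LAST_ERR"
			sleep $((timeout * i))
		else
			break
		fi
	done
	cat "$LAST_OUT"
	cat "$LAST_ERR"
	rm "$LAST_OUT" "$LAST_ERR"
	return $exit_status
}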
+ cat /tmp/tmp.dwog2rftlj
+ rm /tmp/tmp.nLk5TRLk6e /tmp/tmp.dwog2rftlj
+ return 0
+ deploy_operator
+ desc 'start PSMDB operator: perconalab/percona-server-mongodb-operator:PR-2125-8ebcb80f'
+ set +o xtrace
-----------------------------------------------------------------------------------
start PSMDB operator: perconalab/percona-server-mongodb-operator:PR-2125-8ebcb80f
-----------------------------------------------------------------------------------
+ local cr_file
+ '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/conf/crd.yaml ']'
+ cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/crd.yaml
+ kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/crd.yaml
++ mktemp
+ local LAST_OUT=/tmp/tmp.z5JsLFVhaA
++ mktemp
+ local LAST_ERR=/tmp/tmp.eoTqRj8bo2
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/crd.yaml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.z5JsLFVhaA
customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied
+ cat /tmp/tmp.eoTqRj8bo2
+ rm /tmp/tmp.z5JsLFVhaA /tmp/tmp.eoTqRj8bo2
+ return 0
+ '[' -n psmdb-operator ']'
+ apply_rbac cw-rbac
+ local operator_namespace=psmdb-operator
+ local rbac=cw-rbac
+ cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/cw-rbac.yaml
+ sed -e 's^namespace: .*^namespace: psmdb-operator^'
+ kubectl_bin apply -n psmdb-operator -f -
++ mktemp
+ local LAST_OUT=/tmp/tmp.gxQvBBkW6W
++ mktemp
+ local LAST_ERR=/tmp/tmp.Di5T4LrbqN
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply -n psmdb-operator -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.gxQvBBkW6W
clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created
serviceaccount/percona-server-mongodb-operator created
clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created
+ cat /tmp/tmp.Di5T4LrbqN
+ rm /tmp/tmp.gxQvBBkW6W /tmp/tmp.Di5T4LrbqN
+ return 0
+ yq eval ' (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-2125-8ebcb80f") | ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") | ((.. | select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/cw-operator.yaml
+ kubectl_bin apply -n psmdb-operator -f -
++ mktemp
+ local LAST_OUT=/tmp/tmp.XNibAP2DuW
++ mktemp
+ local LAST_ERR=/tmp/tmp.TSwcUKViH8
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply -n psmdb-operator -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.XNibAP2DuW
deployment.apps/percona-server-mongodb-operator created
+ cat /tmp/tmp.TSwcUKViH8
+ rm /tmp/tmp.XNibAP2DuW /tmp/tmp.TSwcUKViH8
+ return 0
+ sleep 20
++ get_operator_pod
++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator
+++ mktemp
++ local LAST_OUT=/tmp/tmp.m4VcocSxcK
+++ mktemp
++ local LAST_ERR=/tmp/tmp.l282RwOcr3
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.m4VcocSxcK
++ cat /tmp/tmp.l282RwOcr3
++ rm /tmp/tmp.m4VcocSxcK /tmp/tmp.l282RwOcr3
++ return 0
+ wait_operator_pod percona-server-mongodb-operator-84495cbcf7-d78bj
+ local pod=percona-server-mongodb-operator-84495cbcf7-d78bj
+ set +o xtrace
waiting for pod/percona-server-mongodb-operator-84495cbcf7-d78bj to be ready.OK
+ echo 'Print operator info from log'
Print operator info from log
+ grep 'Manager starting up'
++ get_operator_pod
++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator
+++ mktemp
++ local LAST_OUT=/tmp/tmp.kChagdVm4Z
+++ mktemp
++ local LAST_ERR=/tmp/tmp.tqpzYOlN1m
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.kChagdVm4Z
++ cat /tmp/tmp.tqpzYOlN1m
++ rm /tmp/tmp.kChagdVm4Z /tmp/tmp.tqpzYOlN1m
++ return 0
+ kubectl_bin logs -n psmdb-operator percona-server-mongodb-operator-84495cbcf7-d78bj
++ mktemp
+ local LAST_OUT=/tmp/tmp.LZrJ2gwwJz
++ mktemp
+ local LAST_ERR=/tmp/tmp.8QD2mIuf3o
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl logs -n psmdb-operator percona-server-mongodb-operator-84495cbcf7-d78bj
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.LZrJ2gwwJz
+ cat /tmp/tmp.8QD2mIuf3o
+ rm /tmp/tmp.LZrJ2gwwJz /tmp/tmp.8QD2mIuf3o
+ return 0
2025-12-10T13:21:58.253Z INFO setup Manager starting up {"gitCommit": "8ebcb80f1012f36e90d7464b396a5e33442e27be", "gitBranch": "PR-2125-8ebcb80f", "buildTime": "", "goVersion": "go1.25.5", "os": "linux", "arch": "amd64"}
+ create_namespace pitr-13298
+ local namespace=pitr-13298
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ tail -n1
++ awk '-F ' '{print $2}'
++ sed s/NAMESPACE//
+ local chaos_mesh_ns=
+ desc 'destroy chaos-mesh'
+ set +o xtrace
-----------------------------------------------------------------------------------
destroy chaos-mesh
-----------------------------------------------------------------------------------
+ '[' -n '' ']'
++ kubectl get MutatingWebhookConfiguration
++ awk '{print $1}'
++ grep chaos-mesh
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ grep validate-auth
++ kubectl get ValidatingWebhookConfiguration
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl api-resources
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get crd
++ grep chaos-mesh.org
++ awk '{print $1}'
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrole
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ kubectl_bin get ns
+ egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME'
+ awk '{print$1}'
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces pitr-13298'
+ set +o xtrace
+ xargs kubectl delete ns
-----------------------------------------------------------------------------------
cleaned up old namespaces pitr-13298
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace pitr-13298 --ignore-not-found
++ mktemp
++ mktemp
egrep: warning: egrep is obsolescent; using grep -E
+ local LAST_OUT=/tmp/tmp.IEjtnDksVa
+ local LAST_OUT=/tmp/tmp.7fLhzGjIfT
++ mktemp
++ mktemp
+ local LAST_ERR=/tmp/tmp.zuUCWosOOH
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ local LAST_ERR=/tmp/tmp.QiCA5AHWhw
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl get ns
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete namespace pitr-13298 --ignore-not-found
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.7fLhzGjIfT
+ cat /tmp/tmp.zuUCWosOOH
+ rm /tmp/tmp.7fLhzGjIfT /tmp/tmp.zuUCWosOOH
+ return 0
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.IEjtnDksVa
+ cat /tmp/tmp.QiCA5AHWhw
+ rm /tmp/tmp.IEjtnDksVa /tmp/tmp.QiCA5AHWhw
+ return 0
+ kubectl_bin wait --for=delete namespace pitr-13298
++ mktemp
+ local LAST_OUT=/tmp/tmp.xOtFwBZoXQ
++ mktemp
+ local LAST_ERR=/tmp/tmp.bbagM8HVQ3
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl wait --for=delete namespace pitr-13298
error: resource(s) were provided, but no name was specified
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.xOtFwBZoXQ
+ cat /tmp/tmp.bbagM8HVQ3
+ rm /tmp/tmp.xOtFwBZoXQ /tmp/tmp.bbagM8HVQ3
+ return 0
+ desc 'create namespace pitr-13298'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace pitr-13298
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace pitr-13298
++ mktemp
+ local LAST_OUT=/tmp/tmp.Qt3xClIM3I
++ mktemp
+ local LAST_ERR=/tmp/tmp.rC453li1vj
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl create namespace pitr-13298
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.Qt3xClIM3I
namespace/pitr-13298 created
+ cat /tmp/tmp.rC453li1vj
+ rm /tmp/tmp.Qt3xClIM3I /tmp/tmp.rC453li1vj
+ return 0
+ set_kube_ctx pitr-13298
+ local namespace=pitr-13298
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.ta4KZqfXM2
+++ mktemp
++ local LAST_ERR=/tmp/tmp.lrR71F06xH
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.ta4KZqfXM2
++ cat /tmp/tmp.lrR71F06xH
++ rm /tmp/tmp.ta4KZqfXM2 /tmp/tmp.lrR71F06xH
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2125-8ebcb80f-3-cluster9 --namespace=pitr-13298
++ mktemp
+ local LAST_OUT=/tmp/tmp.RVDlQxFQwd
++ mktemp
+ local LAST_ERR=/tmp/tmp.oFO1qhH1YF
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2125-8ebcb80f-3-cluster9 --namespace=pitr-13298
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.RVDlQxFQwd
Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-2125-8ebcb80f-3-cluster9" modified.
+ cat /tmp/tmp.oFO1qhH1YF
+ rm /tmp/tmp.RVDlQxFQwd /tmp/tmp.oFO1qhH1YF
+ return 0
+ deploy_minio
+ desc 'install Minio'
+ set +o xtrace
-----------------------------------------------------------------------------------
install Minio
-----------------------------------------------------------------------------------
+ helm uninstall minio-service
Error: uninstall: Release not loaded: minio-service: release: not found
+ :
+ helm repo remove minio
"minio" has been removed from your repositories
+ helm repo add minio https://charts.min.io/
"minio" has been added to your repositories
+ retry 10 60 helm install minio-service --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio
+ local max=10
+ local delay=60
+ shift 2
+ local n=1
+ helm install minio-service --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio
NAME: minio-service
LAST DEPLOYED: Wed Dec 10 13:22:34 2025
NAMESPACE: pitr-13298
STATUS: deployed
REVISION: 1
TEST SUITE: None
NOTES:
MinIO can be accessed via port 9000 on the following DNS name from within your cluster:
minio-service.pitr-13298.cluster.local
To access MinIO from localhost, run the below commands:
1. export POD_NAME=$(kubectl get pods --namespace pitr-13298 -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}")
2. kubectl port-forward $POD_NAME 9000 --namespace pitr-13298
Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/
You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client:
1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart
2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace pitr-13298 minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace pitr-13298 minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000
3. mc ls minio-service-local
++ kubectl_bin get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.0lB8cTil60
+++ mktemp
++ local LAST_ERR=/tmp/tmp.eNluySB4cF
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.0lB8cTil60
++ cat /tmp/tmp.eNluySB4cF
++ rm /tmp/tmp.0lB8cTil60 /tmp/tmp.eNluySB4cF
++ return 0
+ MINIO_POD=minio-service-d9589b474-8g8q5
+ wait_pod minio-service-d9589b474-8g8q5
+ local pod=minio-service-d9589b474-8g8q5
+ set +o xtrace
waiting for pod/minio-service-d9589b474-8g8q5 to be ready.OK
+ '[' -n psmdb-operator ']'
+ kubectl_bin create svc -n psmdb-operator externalname minio-service --external-name=minio-service.pitr-13298.svc.cluster.local --tcp=9000
++ mktemp
+ local LAST_OUT=/tmp/tmp.R6wiaz8S4x
++ mktemp
+ local LAST_ERR=/tmp/tmp.pIrB7I9PZI
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl create svc -n psmdb-operator externalname minio-service --external-name=minio-service.pitr-13298.svc.cluster.local --tcp=9000
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.R6wiaz8S4x
service/minio-service created
+ cat /tmp/tmp.pIrB7I9PZI
+ rm /tmp/tmp.R6wiaz8S4x /tmp/tmp.pIrB7I9PZI
+ return 0
+ create_minio_bucket operator-testing
+ local bucket=operator-testing
+ kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing'
++ mktemp
+ local LAST_OUT=/tmp/tmp.jdbFTz3n2n
++ mktemp
+ local LAST_ERR=/tmp/tmp.5iZzvQTnDz
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing'
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.jdbFTz3n2n
make_bucket: operator-testing
pod "aws-cli" deleted from pitr-13298 namespace
+ cat /tmp/tmp.5iZzvQTnDz
+ rm /tmp/tmp.jdbFTz3n2n /tmp/tmp.5iZzvQTnDz
+ return 0
+ desc 'create secrets and start client'
+ set +o xtrace
-----------------------------------------------------------------------------------
create secrets and start client
-----------------------------------------------------------------------------------
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/conf/client.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/conf/minio-secret.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.Sq4HgOTtt1
++ mktemp
+ local LAST_ERR=/tmp/tmp.596E7WhDRI
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/conf/client.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/conf/minio-secret.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.Sq4HgOTtt1
secret/some-users created
deployment.apps/psmdb-client created
secret/minio-secret created
+ cat /tmp/tmp.596E7WhDRI
+ rm /tmp/tmp.Sq4HgOTtt1 /tmp/tmp.596E7WhDRI
+ return 0
+ cluster=some-name-rs0
+ desc 'create first PSMDB cluster some-name-rs0'
+ set +o xtrace
-----------------------------------------------------------------------------------
create first PSMDB cluster some-name-rs0
-----------------------------------------------------------------------------------
+ apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/conf/some-name-rs0.yml
+ '[' -z '' ']'
+ cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/conf/some-name-rs0.yml
+ kubectl_bin apply -f -
+ cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/conf/some-name-rs0.yml
++ mktemp
+ yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod8.0"'
+ yq eval '(.spec | select(has("pmm"))).pmm.image = "percona/pmm-client:2.44.1-1"'
+ yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-2125-8ebcb80f"'
+ yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"'
+ yq eval '.spec.upgradeOptions.apply="Never"'
+ local LAST_OUT=/tmp/tmp.DOvhRulBZj
++ mktemp
+ local LAST_ERR=/tmp/tmp.J7Hibq0EwN
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.DOvhRulBZj
perconaservermongodb.psmdb.percona.com/some-name created
+ cat /tmp/tmp.J7Hibq0EwN
+ rm /tmp/tmp.DOvhRulBZj /tmp/tmp.J7Hibq0EwN
+ return 0
+ desc 'check if all 3 Pods started'
+ set +o xtrace
-----------------------------------------------------------------------------------
check if all 3 Pods started
-----------------------------------------------------------------------------------
+ wait_for_running some-name-rs0 3
+ local name=some-name-rs0
+ let last_pod=2
+ local check_cluster_readyness=true
+ set_debug
+ [[ 1 == 1 ]]
+ set -o xtrace
+ local rs_name=rs0
+ local cluster_name=some-name
++ seq 0 2
+ for i in $(seq 0 $last_pod)
+ [[ 0 -eq 2 ]]
+ wait_pod some-name-rs0-0
+ local pod=some-name-rs0-0
+ set +o xtrace
waiting for pod/some-name-rs0-0 to be ready.........OK
+ for i in $(seq 0 $last_pod)
+ [[ 1 -eq 2 ]]
+ wait_pod some-name-rs0-1
+ local pod=some-name-rs0-1
+ set +o xtrace
waiting for pod/some-name-rs0-1 to be ready......OK
+ for i in $(seq 0 $last_pod)
+ [[ 2 -eq 2 ]]
++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.64SUivJFbm
+++ mktemp
++ local LAST_ERR=/tmp/tmp.hsImApgiFD
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
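The apply_cluster step above never applies the CR file verbatim: cat_config pipes it through a chain of yq edits that pin every image to the build under test before handing the result to kubectl. Condensed from the trace into a single pipeline (the image tags are the ones visible above; the real helper applies the pmm and initImage overrides the same way):

cat e2e-tests/pitr/conf/some-name-rs0.yml \
	| yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod8.0"' \
	| yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' \
	| yq eval '.spec.upgradeOptions.apply="Never"' \
	| kubectl apply -f -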
++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.64SUivJFbm
++ cat /tmp/tmp.hsImApgiFD
++ rm /tmp/tmp.64SUivJFbm /tmp/tmp.hsImApgiFD
++ return 0
+ [[ '' == \t\r\u\e ]]
+ wait_pod some-name-rs0-2
+ local pod=some-name-rs0-2
+ set +o xtrace
waiting for pod/some-name-rs0-2 to be ready.....OK
++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.H6GJCMsAbZ
+++ mktemp
++ local LAST_ERR=/tmp/tmp.s3k7VxNcQI
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.H6GJCMsAbZ
++ cat /tmp/tmp.s3k7VxNcQI
++ rm /tmp/tmp.H6GJCMsAbZ /tmp/tmp.s3k7VxNcQI
++ return 0
+ [[ '' == \t\r\u\e ]]
++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.ybP3zIxOlT
+++ mktemp
++ local LAST_ERR=/tmp/tmp.lYZFEXU8ze
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.ybP3zIxOlT
++ cat /tmp/tmp.lYZFEXU8ze
++ rm /tmp/tmp.ybP3zIxOlT /tmp/tmp.lYZFEXU8ze
++ return 0
+ [[ '' == \t\r\u\e ]]
+ sleep 10
+ [[ true == \t\r\u\e ]]
+ set +x
Waiting for cluster readyness..
+ desc 'check if statefulset created with expected config'
+ set +o xtrace
-----------------------------------------------------------------------------------
check if statefulset created with expected config
-----------------------------------------------------------------------------------
+ compare_kubectl statefulset/some-name-rs0
+ local resource=statefulset/some-name-rs0
+ local postfix=
+ local skip_generation_check=
+ local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/statefulset_some-name-rs0.yml
+ local new_result=/tmp/tmp.uUgrEfFz0e/statefulset_some-name-rs0.yml
+ '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/statefulset_some-name-rs0-oc.yml ']'
+ kubectl_bin get -o yaml statefulset/some-name-rs0
++ mktemp
+ yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("pitr-13298", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' -
+ local LAST_OUT=/tmp/tmp.H4NzRU95cx
++ mktemp
+ local LAST_ERR=/tmp/tmp.ya1XGKLNQr
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl get -o yaml statefulset/some-name-rs0
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.H4NzRU95cx
+ cat /tmp/tmp.ya1XGKLNQr
+ rm /tmp/tmp.H4NzRU95cx /tmp/tmp.ya1XGKLNQr
+ return 0
+ yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.uUgrEfFz0e/statefulset_some-name-rs0.yml
+ version_gt 1.22
++ echo '1.31 >= 1.22'
++ bc -l
+ '[' 1 -eq 1 ']'
+ return 0
+ yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.uUgrEfFz0e/statefulset_some-name-rs0.yml
+ yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.uUgrEfFz0e/statefulset_some-name-rs0.yml
+ [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/statefulset_some-name-rs0.yml == */cronjob* ]]
+ '[' -n '' ']'
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/statefulset_some-name-rs0.yml /tmp/tmp.uUgrEfFz0e/statefulset_some-name-rs0.yml
+ log 'compare_kubectl: statefulset/some-name-rs0 OK'
+ set +o xtrace
[2025-12-10T13:24:37+0000] compare_kubectl: statefulset/some-name-rs0 OK
+ write_initial_data
+ desc 'create user myApp'
+ set +o xtrace
-----------------------------------------------------------------------------------
create user myApp
-----------------------------------------------------------------------------------
+ run_mongo 'db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' userAdmin:userAdmin123456@some-name-rs0.pitr-13298
+ local 'command=db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})'
+ local uri=userAdmin:userAdmin123456@some-name-rs0.pitr-13298
+ local driver=mongodb+srv
+ local suffix=.svc.cluster.local
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.YAF5rhKljD
+++ mktemp
++ local LAST_ERR=/tmp/tmp.xNjvUu6YJd
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.YAF5rhKljD
++ cat /tmp/tmp.xNjvUu6YJd
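compare_kubectl, which just passed for statefulset/some-name-rs0, works by fetching the live object, deleting every volatile or cluster-specific field with yq, replacing the test namespace with a placeholder, and diffing the result against a golden file. An abbreviated form of that normalization (the trace applies many more del() clauses than shown here):

kubectl get -o yaml statefulset/some-name-rs0 \
	| yq eval 'del(.metadata.managedFields) | del(.metadata.resourceVersion) | del(.status) | (.. | select(tag == "!!str")) |= sub("pitr-13298", "NAME_SPACE")' - \
	>/tmp/statefulset_some-name-rs0.yml
diff -u e2e-tests/pitr/compare/statefulset_some-name-rs0.yml /tmp/statefulset_some-name-rs0.yml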
++ rm /tmp/tmp.YAF5rhKljD /tmp/tmp.xNjvUu6YJd ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ userAdmin:userAdmin123456@some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.O4AEAwHiwN ++ mktemp + local LAST_ERR=/tmp/tmp.kE1ko2jQgH + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.O4AEAwHiwN Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.pitr-13298.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-13298.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.pitr-13298.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("43859c4f-076c-4734-b318-cc31bd82cad3") } Percona Server for MongoDB server version: v8.0.16-5 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] } bye + cat /tmp/tmp.kE1ko2jQgH + rm /tmp/tmp.O4AEAwHiwN /tmp/tmp.kE1ko2jQgH + return 0 + sleep 2 + write_document + local cmp_postfix= + local cluster_name=some-name-rs0 + desc 'write initial data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write initial data, read from all ----------------------------------------------------------------------------------- + run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@some-name-rs0.pitr-13298 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@some-name-rs0.pitr-13298 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CZiexjsBGx +++ mktemp ++ local LAST_ERR=/tmp/tmp.8UiMd06ryv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.CZiexjsBGx ++ cat /tmp/tmp.8UiMd06ryv ++ rm /tmp/tmp.CZiexjsBGx /tmp/tmp.8UiMd06ryv ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.JtaVkgDoGu ++ mktemp + local LAST_ERR=/tmp/tmp.8h8nl849YL + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n 
db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.JtaVkgDoGu Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.pitr-13298.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.pitr-13298.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-13298.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("5dd54c17-5ad2-47fe-935a-a214f00c4bda") } Percona Server for MongoDB server version: v8.0.16-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.8h8nl849YL + rm /tmp/tmp.JtaVkgDoGu /tmp/tmp.8h8nl849YL + return 0 + minikube_sleep + sleep_time=10 + [[ '' == 1 ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 '' + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:24:43+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.92584vsG3Q egrep: warning: egrep is obsolescent; using grep -E +++ mktemp ++ local LAST_ERR=/tmp/tmp.XcVANOHabT ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.92584vsG3Q ++ cat /tmp/tmp.XcVANOHabT ++ rm /tmp/tmp.92584vsG3Q /tmp/tmp.XcVANOHabT ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.vlaiiNT7WE ++ mktemp + local LAST_ERR=/tmp/tmp.6jbACpVmbH + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.vlaiiNT7WE + cat 
/tmp/tmp.6jbACpVmbH + rm /tmp/tmp.vlaiiNT7WE /tmp/tmp.6jbACpVmbH + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find.json /tmp/tmp.uUgrEfFz0e/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 '' + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:24:45+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 mongodb '' + local 'command=use myApp\n db.test.find()' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.f1zRn0xQza +++ mktemp ++ local LAST_ERR=/tmp/tmp.lCWSYEYXPe ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.f1zRn0xQza ++ cat /tmp/tmp.lCWSYEYXPe ++ rm /tmp/tmp.f1zRn0xQza /tmp/tmp.lCWSYEYXPe ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.A9udo6J7v2 ++ mktemp + local LAST_ERR=/tmp/tmp.nNdal8lRk5 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.A9udo6J7v2 + cat /tmp/tmp.nNdal8lRk5 + rm /tmp/tmp.A9udo6J7v2 /tmp/tmp.nNdal8lRk5 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find.json /tmp/tmp.uUgrEfFz0e/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 '' + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:24:48+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' 
myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 mongodb '' + local 'command=use myApp\n db.test.find()' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.FU5nQcnovQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.KDCeHaqHte ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.FU5nQcnovQ ++ cat /tmp/tmp.KDCeHaqHte ++ rm /tmp/tmp.FU5nQcnovQ /tmp/tmp.KDCeHaqHte ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.4JQsHIrkuh ++ mktemp + local LAST_ERR=/tmp/tmp.I1XRUBU86L + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.4JQsHIrkuh + cat /tmp/tmp.I1XRUBU86L + rm /tmp/tmp.4JQsHIrkuh /tmp/tmp.I1XRUBU86L + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find.json /tmp/tmp.uUgrEfFz0e/find + wait_backup_agent some-name-rs0-0 + local agent_pod=some-name-rs0-0 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs0-0...2025-12-10T13:24:30.000+0000 I listening for the commands + wait_backup_agent some-name-rs0-1 + local agent_pod=some-name-rs0-1 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs0-1...2025-12-10T13:24:35.000+0000 I listening for the commands + wait_backup_agent some-name-rs0-2 + local agent_pod=some-name-rs0-2 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs0-2...2025-12-10T13:24:37.000+0000 I listening for the commands + backup_name_minio=backup-minio + run_mongo 'use myApp\n db.test2.insert({ x: 100501 })' myApp:myPass@some-name-rs0.pitr-13298 + local 'command=use myApp\n db.test2.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.pitr-13298 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8CGQKFAC3w +++ mktemp ++ local LAST_ERR=/tmp/tmp.529E07VXTv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 
0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8CGQKFAC3w ++ cat /tmp/tmp.529E07VXTv ++ rm /tmp/tmp.8CGQKFAC3w /tmp/tmp.529E07VXTv ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test2.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.mSN4GnyHuI ++ mktemp + local LAST_ERR=/tmp/tmp.2gxSKi6p9w + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test2.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.mSN4GnyHuI Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.pitr-13298.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-13298.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.pitr-13298.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("fcded465-b0c0-4bfb-af20-339c79f31519") } Percona Server for MongoDB server version: v8.0.16-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.2gxSKi6p9w + rm /tmp/tmp.mSN4GnyHuI /tmp/tmp.2gxSKi6p9w + return 0 + run_backup backup-minio 0 + local name=backup-minio + local idx=0 + desc 'run backup backup-minio-0' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio-0 ----------------------------------------------------------------------------------- + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/conf/backup-minio.yml + /usr/sbin/sed -e 's/name:/name: backup-minio-0/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.bQTfkwntRB ++ mktemp + local LAST_ERR=/tmp/tmp.vffVMWwETs + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.bQTfkwntRB perconaservermongodbbackup.psmdb.percona.com/backup-minio-0 created + cat /tmp/tmp.vffVMWwETs + rm /tmp/tmp.bQTfkwntRB /tmp/tmp.vffVMWwETs + return 0 + wait_backup backup-minio-0 + local backup_name=backup-minio-0 + local target_state=ready + set +o xtrace waiting for backup-minio-0 to reach ready state.......OK + write_document -2nd + local cmp_postfix=-2nd + local cluster_name=some-name-rs0 + desc 'write initial data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write initial data, read from all ----------------------------------------------------------------------------------- + run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@some-name-rs0.pitr-13298 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@some-name-rs0.pitr-13298 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FCQtblGESA +++ mktemp ++ local 
LAST_ERR=/tmp/tmp.1OEPmofPrf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.FCQtblGESA ++ cat /tmp/tmp.1OEPmofPrf ++ rm /tmp/tmp.FCQtblGESA /tmp/tmp.1OEPmofPrf ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.44zP2QOWA5 ++ mktemp + local LAST_ERR=/tmp/tmp.LeLocWQ5qG + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.44zP2QOWA5 Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.pitr-13298.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-13298.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.pitr-13298.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("501c3661-7398-456e-8969-247544c9b6e9") } Percona Server for MongoDB server version: v8.0.16-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.LeLocWQ5qG + rm /tmp/tmp.44zP2QOWA5 /tmp/tmp.LeLocWQ5qG + return 0 + minikube_sleep + sleep_time=10 + [[ '' == 1 ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:25:12+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local driver=mongodb + local suffix=.svc.cluster.local + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.WezbSR6Hll +++ mktemp ++ local LAST_ERR=/tmp/tmp.Va8Y2yIdcg ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ 
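
Nearly every command in this log runs through the harness's kubectl_bin wrapper, whose internals the xtrace keeps expanding: two mktemp files for captured output, up to three attempts, replay of the captured streams, cleanup, and the attempt's exit status as the return value. A minimal sketch of that wrapper, reconstructed from the trace (the redirections and the sleep between failed attempts are assumptions, not the verbatim helper):

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0 timeout=4
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do                        # up to 3 attempts
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"    # redirection assumed
            exit_status=$?
            set -e
            [ "$exit_status" -eq 0 ] && break
            sleep "$timeout"                           # back-off between attempts assumed
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2                            # stderr replay assumed
        rm -f "$LAST_OUT" "$LAST_ERR"
        return $exit_status
    }
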
break ++ cat /tmp/tmp.WezbSR6Hll ++ cat /tmp/tmp.Va8Y2yIdcg ++ rm /tmp/tmp.WezbSR6Hll /tmp/tmp.Va8Y2yIdcg ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.laO5n0L82l ++ mktemp + local LAST_ERR=/tmp/tmp.YKQ4i1NnS1 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.laO5n0L82l + cat /tmp/tmp.YKQ4i1NnS1 + rm /tmp/tmp.laO5n0L82l /tmp/tmp.YKQ4i1NnS1 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find-2nd.json /tmp/tmp.uUgrEfFz0e/find-2nd + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:25:15+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.U0bUouYMXP +++ mktemp ++ local LAST_ERR=/tmp/tmp.sxnj76MdJI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.U0bUouYMXP ++ cat /tmp/tmp.sxnj76MdJI ++ rm /tmp/tmp.U0bUouYMXP /tmp/tmp.sxnj76MdJI ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.bFGhJKUZnO ++ mktemp + local LAST_ERR=/tmp/tmp.yD8sBiQnhK + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 
'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.bFGhJKUZnO + cat /tmp/tmp.yD8sBiQnhK + rm /tmp/tmp.bFGhJKUZnO /tmp/tmp.yD8sBiQnhK + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find-2nd.json /tmp/tmp.uUgrEfFz0e/find-2nd + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:25:17+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 mongodb '' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.c8wCim1Xgn +++ mktemp ++ local LAST_ERR=/tmp/tmp.d3xQqDbJAs ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.c8wCim1Xgn ++ cat /tmp/tmp.d3xQqDbJAs ++ rm /tmp/tmp.c8wCim1Xgn /tmp/tmp.d3xQqDbJAs ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.prXQubbKIz ++ mktemp + local LAST_ERR=/tmp/tmp.2fHJ4exNh0 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.prXQubbKIz + cat /tmp/tmp.2fHJ4exNh0 + rm /tmp/tmp.prXQubbKIz /tmp/tmp.2fHJ4exNh0 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find-2nd.json /tmp/tmp.uUgrEfFz0e/find-2nd + sleep 2 ++ run_mongo 'new Date().toISOString()' myApp:myPass@some-name-rs0.pitr-13298 mongodb '' --quiet ++ local 'command=new Date().toISOString()' ++ local uri=myApp:myPass@some-name-rs0.pitr-13298 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ 
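
The subshell traced next captures the cluster's current time and reformats it into the restore-by-date target. Condensed into one pipeline (run_mongo is the harness helper expanded throughout this log; the noise filter is copied from the trace):

    time_now=$(run_mongo 'new Date().toISOString()' \
            myApp:myPass@some-name-rs0.pitr-13298 mongodb '' --quiet \
        | grep -E -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match' \
        | cut -c1-19 | tr T ' ')
    # e.g. "2025-12-10T13:25:24.123Z" -> "2025-12-10 13:25:24"
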
grep -E -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match' ++ cut -c1-19 ++ tr T ' ' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.nUZlZLvYH3 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.4rYoP4FSRH +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.nUZlZLvYH3 +++ cat /tmp/tmp.4rYoP4FSRH +++ rm /tmp/tmp.nUZlZLvYH3 /tmp/tmp.4rYoP4FSRH +++ return 0 ++ local client_container=psmdb-client-696897d69b-nrc64 ++ local mongo_flag=--quiet ++ [[ myApp:myPass@some-name-rs0.pitr-13298 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''new Date().toISOString()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.43X6i1tI8U +++ mktemp ++ local LAST_ERR=/tmp/tmp.CHOllie9dr ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''new Date().toISOString()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.43X6i1tI8U ++ cat /tmp/tmp.CHOllie9dr ++ rm /tmp/tmp.43X6i1tI8U /tmp/tmp.CHOllie9dr ++ return 0 + time_now='2025-12-10 13:25:24' + check_recovery backup-minio-0 date '2025-12-10 13:25:24' -2nd '' some-name test2 + local backup_name=backup-minio-0 + local restore_type=date + local 'restore_date=2025-12-10 13:25:24' + local cmp_postfix=-2nd + local backupSource= + local cluster_name=some-name + local selective_collection=test2 + local restore_name=restore-backup-minio-0 + local restore_file=restore.yml + local cluster + cluster=some-name-rs0 + desc 'write more data before restore by date' + set +o xtrace ----------------------------------------------------------------------------------- write more data before restore by date ----------------------------------------------------------------------------------- + sleep 60 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.pitr-13298 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.pitr-13298 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1qX0lJJwxL +++ mktemp ++ local LAST_ERR=/tmp/tmp.FfyJBnPSMi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.1qX0lJJwxL ++ cat /tmp/tmp.FfyJBnPSMi ++ rm /tmp/tmp.1qX0lJJwxL /tmp/tmp.FfyJBnPSMi ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo 
mongodb+srv://myApp:myPass@some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.KenR9etcBN ++ mktemp + local LAST_ERR=/tmp/tmp.wBpfRdxbO5 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.KenR9etcBN Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.pitr-13298.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-13298.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.pitr-13298.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("f32e742d-6bab-4ec9-8751-182fcb187e14") } Percona Server for MongoDB server version: v8.0.16-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.wBpfRdxbO5 + rm /tmp/tmp.KenR9etcBN /tmp/tmp.wBpfRdxbO5 + return 0 + '[' -n test2 ']' + run_mongo 'use myApp\n db.test2.drop()' myApp:myPass@some-name-rs0.pitr-13298 + local 'command=use myApp\n db.test2.drop()' + local uri=myApp:myPass@some-name-rs0.pitr-13298 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vuqG2u80Oj +++ mktemp ++ local LAST_ERR=/tmp/tmp.vxKlU0rF75 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.vuqG2u80Oj ++ cat /tmp/tmp.vxKlU0rF75 ++ rm /tmp/tmp.vuqG2u80Oj /tmp/tmp.vxKlU0rF75 ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test2.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.MPUmzNfz6r ++ mktemp + local LAST_ERR=/tmp/tmp.0ADvSBNwyq + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test2.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.MPUmzNfz6r Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.pitr-13298.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.pitr-13298.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-13298.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("c33a6f2e-0dea-49c8-bd5f-9f89ec0a8158") } Percona Server for MongoDB server version: v8.0.16-5 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.0ADvSBNwyq + rm /tmp/tmp.MPUmzNfz6r /tmp/tmp.0ADvSBNwyq + return 0 + 
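
Before the restore by date below, the harness sleeps 120 s so PBM can upload the oplog chunks covering the target time. The sed/yq pipeline that then builds the restore object effectively applies a manifest like the following sketch (the spec.pitr nesting is inferred from the template substitutions; the names, the date, and the selective block are verbatim from the trace):

    kubectl apply -f - <<'EOF'
    apiVersion: psmdb.percona.com/v1
    kind: PerconaServerMongoDBRestore
    metadata:
      name: restore-backup-minio-0
    spec:
      clusterName: some-name
      backupName: backup-minio-0
      pitr:
        type: date
        date: "2025-12-10 13:25:24"
      selective:
        namespaces:
        - myApp.test
        withUsersAndRoles: true
    EOF
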
desc 'waiting for chunks to be uploaded' + set +o xtrace ----------------------------------------------------------------------------------- waiting for chunks to be uploaded ----------------------------------------------------------------------------------- + sleep 120 + desc 'check restore by date' + set +o xtrace ----------------------------------------------------------------------------------- check restore by date ----------------------------------------------------------------------------------- + '[' -z '' ']' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-minio-0/' + /usr/sbin/sed -e 's/backupName:/backupName: backup-minio-0/' + /usr/sbin/sed -e 's/clusterName:/clusterName: some-name/' + /usr/sbin/sed -e 's/pitrType:/type: date/' + /usr/sbin/sed -e /backupSource/,+2d + '[' -z '2025-12-10 13:25:24' ']' + '[' -n test2 ']' + /usr/sbin/sed -e 's/date:/date: 2025-12-10 13:25:24/' + yq eval '.spec.selective = {"namespaces": ["myApp.test"], "withUsersAndRoles": true}' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.r7pTSMnIUd ++ mktemp + local LAST_ERR=/tmp/tmp.iDNkGiYXiQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.r7pTSMnIUd perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-0 created + cat /tmp/tmp.iDNkGiYXiQ + rm /tmp/tmp.r7pTSMnIUd /tmp/tmp.iDNkGiYXiQ + return 0 + wait_restore backup-minio-0 some-name + local backup_name=backup-minio-0 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-0 object to be created.OK Waiting psmdb-restore/restore-backup-minio-0 to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iOKqFB53eB +++ mktemp ++ local LAST_ERR=/tmp/tmp.5E2J1vpVrV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.iOKqFB53eB ++ cat /tmp/tmp.5E2J1vpVrV ++ rm /tmp/tmp.iOKqFB53eB /tmp/tmp.5E2J1vpVrV ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + echo + set -o xtrace + '[' -n test2 ']' ++ collection_exists test2 ./e2e-tests/pitr/run: line 108: collection_exists: command not found + [[ '' == \t\r\u\e ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:28:55+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 mongodb '' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable 
to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local 'command=use myApp\n db.test.find()' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7TzWVyH6RB egrep: warning: egrep is obsolescent; using grep -E +++ mktemp ++ local LAST_ERR=/tmp/tmp.eoREVACyTd ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.7TzWVyH6RB ++ cat /tmp/tmp.eoREVACyTd ++ rm /tmp/tmp.7TzWVyH6RB /tmp/tmp.eoREVACyTd ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.TNCcUEEApe ++ mktemp + local LAST_ERR=/tmp/tmp.xoh8GCtm6Q + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.TNCcUEEApe + cat /tmp/tmp.xoh8GCtm6Q + rm /tmp/tmp.TNCcUEEApe /tmp/tmp.xoh8GCtm6Q + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find-2nd.json /tmp/tmp.uUgrEfFz0e/find-2nd + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:28:57+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.mYO8rEmEd7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.eXS94bTUcU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods 
--selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.mYO8rEmEd7 ++ cat /tmp/tmp.eXS94bTUcU ++ rm /tmp/tmp.mYO8rEmEd7 /tmp/tmp.eXS94bTUcU ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.oDulYEg1NP ++ mktemp + local LAST_ERR=/tmp/tmp.ex05JFjBL8 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.oDulYEg1NP + cat /tmp/tmp.ex05JFjBL8 + rm /tmp/tmp.oDulYEg1NP /tmp/tmp.ex05JFjBL8 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find-2nd.json /tmp/tmp.uUgrEfFz0e/find-2nd + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:28:59+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 + local driver=mongodb + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local suffix=.svc.cluster.local + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.5zANci7gq9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.VhVMAu1vKa ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.5zANci7gq9 ++ cat /tmp/tmp.VhVMAu1vKa ++ rm /tmp/tmp.5zANci7gq9 /tmp/tmp.VhVMAu1vKa ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.T19whsD0Zq ++ mktemp + local LAST_ERR=/tmp/tmp.QQrmgaGCZ2 + local 
exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.T19whsD0Zq + cat /tmp/tmp.QQrmgaGCZ2 + rm /tmp/tmp.T19whsD0Zq /tmp/tmp.QQrmgaGCZ2 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find-2nd.json /tmp/tmp.uUgrEfFz0e/find-2nd + run_backup backup-minio 1 + local name=backup-minio + local idx=1 + desc 'run backup backup-minio-1' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio-1 ----------------------------------------------------------------------------------- + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/conf/backup-minio.yml + /usr/sbin/sed -e 's/name:/name: backup-minio-1/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.MzKT6BHORv ++ mktemp + local LAST_ERR=/tmp/tmp.q6OS5au4kX + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.MzKT6BHORv perconaservermongodbbackup.psmdb.percona.com/backup-minio-1 created + cat /tmp/tmp.q6OS5au4kX + rm /tmp/tmp.MzKT6BHORv /tmp/tmp.q6OS5au4kX + return 0 + wait_backup backup-minio-1 + local backup_name=backup-minio-1 + local target_state=ready + set +o xtrace waiting for backup-minio-1 to reach ready state.......OK + compare_latest_restorable_time some-name-rs0 backup-minio-1 + local cluster=some-name-rs0 + local backup_name=backup-minio-1 + local latest_restorable_time + local backup_time ++ get_latest_restorable_time some-name-rs0 ++ local cluster=some-name-rs0 ++ local first_timestamp ++ local second_timestamp ++ local retry=0 ++ [[ '' != '' ]] +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.bX4ngwRR9E ++++ mktemp +++ local LAST_ERR=/tmp/tmp.MKal8Owv3Y +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.bX4ngwRR9E +++ cat /tmp/tmp.MKal8Owv3Y +++ rm /tmp/tmp.bX4ngwRR9E /tmp/tmp.MKal8Owv3Y +++ return 0 ++ first_timestamp=1765373316 ++ sleep 5 ++ [[ 1765373316 != '' ]] ++ [[ 1765373316 != \n\u\l\l ]] +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json ++++ mktemp +++ local LAST_OUT=/tmp/tmp.5bhOsR6yLn ++++ mktemp +++ local LAST_ERR=/tmp/tmp.8kvv0NksWs +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.5bhOsR6yLn +++ cat /tmp/tmp.8kvv0NksWs +++ rm /tmp/tmp.5bhOsR6yLn /tmp/tmp.8kvv0NksWs +++ return 0 ++ second_timestamp=1765373316 ++ let retry+=1 ++ [[ 1 -gt 30 ]] ++ [[ 1765373316 != '' ]] ++ [[ 1765373316 != \n\u\l\l ]] ++ [[ 1765373316 == 1765373316 ]] ++ /usr/sbin/date -u -d @1765373316 +%Y-%m-%dT%H:%M:%SZ + 
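
Two notes on this phase. First, the trace above records a harness bug: ./e2e-tests/pitr/run: line 108: collection_exists: command not found, which leaves the substitution empty, so the [[ '' == true ]] guard is always false and the check that the dropped test2 collection stayed excluded from the selective restore never actually runs. Second, compare_latest_restorable_time boils down to checking PBM's last PITR chunk against the backup object's status; a rough sketch of the comparison being traced here:

    end=$(kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json \
        | jq '.backups.pitrChunks.pitrChunks | last | .range.end')   # epoch seconds, e.g. 1765373316
    latest=$(date -u -d "@${end}" +%Y-%m-%dT%H:%M:%SZ)               # -> 2025-12-10T13:28:36Z
    from_cr=$(kubectl get psmdb-backup backup-minio-1 \
        -o 'jsonpath={.status.latestRestorableTime}')
    [ "$latest" = "$from_cr" ] || echo "latestRestorableTime mismatch: $latest vs $from_cr" >&2
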
latest_restorable_time=2025-12-10T13:28:36Z ++ get_latest_restorable_time_from_backup_object backup-minio-1 ++ local backup_name=backup-minio-1 ++ local latestRestorableTime ++ local retry=0 ++ [[ '' != '' ]] ++ sleep 5 +++ kubectl_bin get psmdb-backup backup-minio-1 -o 'jsonpath={.status.latestRestorableTime}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.OXeNtvGW8K ++++ mktemp +++ local LAST_ERR=/tmp/tmp.qGzKIPkPOV +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get psmdb-backup backup-minio-1 -o 'jsonpath={.status.latestRestorableTime}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.OXeNtvGW8K +++ cat /tmp/tmp.qGzKIPkPOV +++ rm /tmp/tmp.OXeNtvGW8K /tmp/tmp.qGzKIPkPOV +++ return 0 ++ latestRestorableTime=2025-12-10T13:28:36Z ++ let retry+=1 ++ [[ 1 -gt 30 ]] ++ [[ 2025-12-10T13:28:36Z != '' ]] ++ [[ 2025-12-10T13:28:36Z != \n\u\l\l ]] ++ echo 2025-12-10T13:28:36Z + backup_time=2025-12-10T13:28:36Z + [[ 2025-12-10T13:28:36Z != \2\0\2\5\-\1\2\-\1\0\T\1\3\:\2\8\:\3\6\Z ]] + check_recovery backup-minio-1 latest '' -3rd '' some-name + local backup_name=backup-minio-1 + local restore_type=latest + local restore_date= + local cmp_postfix=-3rd + local backupSource= + local cluster_name=some-name + local selective_collection= + local restore_name=restore-backup-minio-1 + local restore_file=restore.yml + local cluster + cluster=some-name-rs0 + desc 'write more data before restore by latest' + set +o xtrace ----------------------------------------------------------------------------------- write more data before restore by latest ----------------------------------------------------------------------------------- + sleep 60 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.pitr-13298 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.pitr-13298 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.b1uY69zWat +++ mktemp ++ local LAST_ERR=/tmp/tmp.uNVLkPJ0qB ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.b1uY69zWat ++ cat /tmp/tmp.uNVLkPJ0qB ++ rm /tmp/tmp.b1uY69zWat /tmp/tmp.uNVLkPJ0qB ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.aRdM3kXUsB ++ mktemp + local LAST_ERR=/tmp/tmp.YlQoaATfPn + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.aRdM3kXUsB Percona Server for MongoDB shell version v4.4.29-28 connecting to: 
mongodb://some-name-rs0-0.some-name-rs0.pitr-13298.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-13298.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.pitr-13298.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("fb24d687-8340-4fb7-9c1b-2f6439f70882") } Percona Server for MongoDB server version: v8.0.16-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.YlQoaATfPn + rm /tmp/tmp.aRdM3kXUsB /tmp/tmp.YlQoaATfPn + return 0 + '[' -n '' ']' + desc 'waiting for chunks to be uploaded' + set +o xtrace ----------------------------------------------------------------------------------- waiting for chunks to be uploaded ----------------------------------------------------------------------------------- + sleep 120 + desc 'check restore by latest' + set +o xtrace ----------------------------------------------------------------------------------- check restore by latest ----------------------------------------------------------------------------------- + '[' -z '' ']' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-minio-1/' + /usr/sbin/sed -e 's/backupName:/backupName: backup-minio-1/' + /usr/sbin/sed -e 's/clusterName:/clusterName: some-name/' + /usr/sbin/sed -e 's/pitrType:/type: latest/' + '[' -z '' ']' + /usr/sbin/sed -e /date:/d + /usr/sbin/sed -e /backupSource/,+2d + '[' -n '' ']' + yq + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.i5j7Tr7bnM ++ mktemp + local LAST_ERR=/tmp/tmp.X3bSzJ7EYQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.i5j7Tr7bnM perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-1 created + cat /tmp/tmp.X3bSzJ7EYQ + rm /tmp/tmp.i5j7Tr7bnM /tmp/tmp.X3bSzJ7EYQ + return 0 + wait_restore backup-minio-1 some-name + local backup_name=backup-minio-1 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-1 object to be created.OK Waiting psmdb-restore/restore-backup-minio-1 to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VZOO2cXtNP +++ mktemp ++ local LAST_ERR=/tmp/tmp.GtgyufNeKk ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.VZOO2cXtNP ++ cat /tmp/tmp.GtgyufNeKk ++ rm /tmp/tmp.VZOO2cXtNP /tmp/tmp.GtgyufNeKk ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + echo + set -o xtrace + '[' -n '' ']' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 -3rd + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 + local postfix=-3rd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running 
db.test.find() in myApp' + set +o xtrace [2025-12-10T13:33:05+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ecjcfrnsE8 egrep: warning: egrep is obsolescent; using grep -E +++ mktemp ++ local LAST_ERR=/tmp/tmp.Zu5psdGFAR ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ecjcfrnsE8 ++ cat /tmp/tmp.Zu5psdGFAR ++ rm /tmp/tmp.ecjcfrnsE8 /tmp/tmp.Zu5psdGFAR ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Npeh8g6VJp ++ mktemp + local LAST_ERR=/tmp/tmp.NJncSOUSUm + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Npeh8g6VJp + cat /tmp/tmp.NJncSOUSUm + rm /tmp/tmp.Npeh8g6VJp /tmp/tmp.NJncSOUSUm + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find-3rd.json /tmp/tmp.uUgrEfFz0e/find-3rd + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 -3rd + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 + local postfix=-3rd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:33:07+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; 
s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.ZSkGaukx8r +++ mktemp ++ local LAST_ERR=/tmp/tmp.HhsK4wkSsp ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ZSkGaukx8r ++ cat /tmp/tmp.HhsK4wkSsp ++ rm /tmp/tmp.ZSkGaukx8r /tmp/tmp.HhsK4wkSsp ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.FsuqA4E5bK ++ mktemp + local LAST_ERR=/tmp/tmp.POik89OwsZ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.FsuqA4E5bK + cat /tmp/tmp.POik89OwsZ + rm /tmp/tmp.FsuqA4E5bK /tmp/tmp.POik89OwsZ + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find-3rd.json /tmp/tmp.uUgrEfFz0e/find-3rd + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 -3rd + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 + local postfix=-3rd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:33:09+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 mongodb '' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' egrep: warning: egrep is obsolescent; using grep -E +++ mktemp ++ local LAST_OUT=/tmp/tmp.ocDROshmId +++ mktemp ++ local LAST_ERR=/tmp/tmp.Rmvn3IV4JO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ocDROshmId ++ cat /tmp/tmp.Rmvn3IV4JO ++ rm /tmp/tmp.ocDROshmId /tmp/tmp.Rmvn3IV4JO ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ 
myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.iTRdfu49Y8 ++ mktemp + local LAST_ERR=/tmp/tmp.ReD5mVdT0Z + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.iTRdfu49Y8 + cat /tmp/tmp.ReD5mVdT0Z + rm /tmp/tmp.iTRdfu49Y8 /tmp/tmp.ReD5mVdT0Z + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find-3rd.json /tmp/tmp.uUgrEfFz0e/find-3rd + desc 'restore pitr using backupSource' + set +o xtrace ----------------------------------------------------------------------------------- restore pitr using backupSource ----------------------------------------------------------------------------------- + reset_collection + desc 'reset data' + set +o xtrace ----------------------------------------------------------------------------------- reset data ----------------------------------------------------------------------------------- + run_mongo 'use myApp\n db.test.remove({})' myApp:myPass@some-name-rs0.pitr-13298 + local 'command=use myApp\n db.test.remove({})' + local uri=myApp:myPass@some-name-rs0.pitr-13298 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.d6MmDubX1s +++ mktemp ++ local LAST_ERR=/tmp/tmp.uH0aXhsGUL ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.d6MmDubX1s ++ cat /tmp/tmp.uH0aXhsGUL ++ rm /tmp/tmp.d6MmDubX1s /tmp/tmp.uH0aXhsGUL ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.remove({})\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.h1LL73bA5x ++ mktemp + local LAST_ERR=/tmp/tmp.c8Abv5B1lx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.remove({})\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.h1LL73bA5x Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.pitr-13298.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-13298.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.pitr-13298.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : 
UUID("a666b28f-0f12-49ca-98c8-feeadd6a1a6e") } Percona Server for MongoDB server version: v8.0.16-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nRemoved" : 3 }) bye + cat /tmp/tmp.c8Abv5B1lx + rm /tmp/tmp.h1LL73bA5x /tmp/tmp.c8Abv5B1lx + return 0 + sleep 2 + write_document + local cmp_postfix= + local cluster_name=some-name-rs0 + desc 'write initial data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write initial data, read from all ----------------------------------------------------------------------------------- + run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@some-name-rs0.pitr-13298 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@some-name-rs0.pitr-13298 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4s6AlxUoU2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.msaORiOhDt ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.4s6AlxUoU2 ++ cat /tmp/tmp.msaORiOhDt ++ rm /tmp/tmp.4s6AlxUoU2 /tmp/tmp.msaORiOhDt ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.yY46MBUNyo ++ mktemp + local LAST_ERR=/tmp/tmp.QgBsaJUsQE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.yY46MBUNyo Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.pitr-13298.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-13298.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.pitr-13298.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("759e615e-42c1-4fb6-9951-bbe01a51c087") } Percona Server for MongoDB server version: v8.0.16-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.QgBsaJUsQE + rm /tmp/tmp.yY46MBUNyo /tmp/tmp.QgBsaJUsQE + return 0 + minikube_sleep + sleep_time=10 + [[ '' == 1 ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 '' + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:33:19+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n 
db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 mongodb '' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.yG4piM2hXC +++ mktemp ++ local LAST_ERR=/tmp/tmp.44B0sgGQWz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.yG4piM2hXC ++ cat /tmp/tmp.44B0sgGQWz ++ rm /tmp/tmp.yG4piM2hXC /tmp/tmp.44B0sgGQWz ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.RaLfDTSSdr ++ mktemp + local LAST_ERR=/tmp/tmp.vscBeqldJ5 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.RaLfDTSSdr + cat /tmp/tmp.vscBeqldJ5 + rm /tmp/tmp.RaLfDTSSdr /tmp/tmp.vscBeqldJ5 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find.json /tmp/tmp.uUgrEfFz0e/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 '' + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:33:21+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3ZP2zmKstV egrep: warning: egrep is obsolescent; using 
grep -E +++ mktemp ++ local LAST_ERR=/tmp/tmp.CKVkshyls7 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3ZP2zmKstV ++ cat /tmp/tmp.CKVkshyls7 ++ rm /tmp/tmp.3ZP2zmKstV /tmp/tmp.CKVkshyls7 ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.pB7QGukKs8 ++ mktemp + local LAST_ERR=/tmp/tmp.STCLxXBZmg + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.pB7QGukKs8 + cat /tmp/tmp.STCLxXBZmg + rm /tmp/tmp.pB7QGukKs8 /tmp/tmp.STCLxXBZmg + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find.json /tmp/tmp.uUgrEfFz0e/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 '' + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:33:24+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.AwfDYwo0cn +++ mktemp ++ local LAST_ERR=/tmp/tmp.mEZeC2I2BN ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.AwfDYwo0cn ++ cat /tmp/tmp.mEZeC2I2BN ++ rm /tmp/tmp.AwfDYwo0cn /tmp/tmp.mEZeC2I2BN ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo 
mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.LxACrrwtj9 ++ mktemp + local LAST_ERR=/tmp/tmp.SIeQ2w9rNB + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LxACrrwtj9 + cat /tmp/tmp.SIeQ2w9rNB + rm /tmp/tmp.LxACrrwtj9 /tmp/tmp.SIeQ2w9rNB + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find.json /tmp/tmp.uUgrEfFz0e/find + run_backup backup-minio 2 + local name=backup-minio + local idx=2 + desc 'run backup backup-minio-2' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio-2 ----------------------------------------------------------------------------------- + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/conf/backup-minio.yml + /usr/sbin/sed -e 's/name:/name: backup-minio-2/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.Ikorg4CdYw ++ mktemp + local LAST_ERR=/tmp/tmp.FxytjDaK5e + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Ikorg4CdYw perconaservermongodbbackup.psmdb.percona.com/backup-minio-2 created + cat /tmp/tmp.FxytjDaK5e + rm /tmp/tmp.Ikorg4CdYw /tmp/tmp.FxytjDaK5e + return 0 + wait_backup backup-minio-2 + local backup_name=backup-minio-2 + local target_state=ready + set +o xtrace waiting for backup-minio-2 to reach ready state.......OK + compare_latest_restorable_time some-name-rs0 backup-minio-2 + local cluster=some-name-rs0 + local backup_name=backup-minio-2 + local latest_restorable_time + local backup_time ++ get_latest_restorable_time some-name-rs0 ++ local cluster=some-name-rs0 ++ local first_timestamp ++ local second_timestamp ++ local retry=0 ++ [[ '' != '' ]] +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.MrYDkai4N2 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.sFUTl2rB8N +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.MrYDkai4N2 +++ cat /tmp/tmp.sFUTl2rB8N +++ rm /tmp/tmp.MrYDkai4N2 /tmp/tmp.sFUTl2rB8N +++ return 0 ++ first_timestamp=1765373562 ++ sleep 5 ++ [[ 1765373562 != '' ]] ++ [[ 1765373562 != \n\u\l\l ]] +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.dDv60A5Vtp ++++ mktemp +++ local LAST_ERR=/tmp/tmp.zy5nXfkI9d +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.dDv60A5Vtp +++ cat /tmp/tmp.zy5nXfkI9d +++ rm /tmp/tmp.dDv60A5Vtp /tmp/tmp.zy5nXfkI9d +++ return 0 ++ second_timestamp=1765373562 ++ let retry+=1 
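The compare_latest_restorable_time step above polls PBM until the end of the newest PITR chunk stops moving. Condensed into a readable sketch (not the verbatim helper: the script's kubectl_bin retry wrapper and temp-file plumbing are elided; the 5-second sleep, 30-retry bound, and jq path are taken from the trace):

    # Read the end of the newest PITR chunk twice, 5 s apart; retry until stable.
    get_latest_restorable_time() {
        local cluster=$1 first second retry=0
        while true; do
            first=$(kubectl exec "${cluster}-0" -c backup-agent -- pbm status -o json |
                jq '.backups.pitrChunks.pitrChunks | last | .range.end')
            sleep 5
            second=$(kubectl exec "${cluster}-0" -c backup-agent -- pbm status -o json |
                jq '.backups.pitrChunks.pitrChunks | last | .range.end')
            retry=$((retry + 1))
            [[ $retry -gt 30 ]] && { echo "PITR chunk never stabilized" >&2; return 1; }
            [[ -n $first && $first != null && $first == "$second" ]] && break
        done
        /usr/sbin/date -u -d "@${second}" +%Y-%m-%dT%H:%M:%SZ   # 1765373562 -> 2025-12-10T13:32:42Z
    }

The stable value must then match the backup object itself: kubectl get psmdb-backup backup-minio-2 -o jsonpath='{.status.latestRestorableTime}' returns the same 2025-12-10T13:32:42Z below, so the inequality guard passes and the test moves on to the restore.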
++ [[ 1 -gt 30 ]] ++ [[ 1765373562 != '' ]] ++ [[ 1765373562 != \n\u\l\l ]] ++ [[ 1765373562 == 1765373562 ]] ++ /usr/sbin/date -u -d @1765373562 +%Y-%m-%dT%H:%M:%SZ + latest_restorable_time=2025-12-10T13:32:42Z ++ get_latest_restorable_time_from_backup_object backup-minio-2 ++ local backup_name=backup-minio-2 ++ local latestRestorableTime ++ local retry=0 ++ [[ '' != '' ]] ++ sleep 5 +++ kubectl_bin get psmdb-backup backup-minio-2 -o 'jsonpath={.status.latestRestorableTime}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.Faj5uJit1J ++++ mktemp +++ local LAST_ERR=/tmp/tmp.EVx9eMvCct +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get psmdb-backup backup-minio-2 -o 'jsonpath={.status.latestRestorableTime}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.Faj5uJit1J +++ cat /tmp/tmp.EVx9eMvCct +++ rm /tmp/tmp.Faj5uJit1J /tmp/tmp.EVx9eMvCct +++ return 0 ++ latestRestorableTime=2025-12-10T13:32:42Z ++ let retry+=1 ++ [[ 1 -gt 30 ]] ++ [[ 2025-12-10T13:32:42Z != '' ]] ++ [[ 2025-12-10T13:32:42Z != \n\u\l\l ]] ++ echo 2025-12-10T13:32:42Z + backup_time=2025-12-10T13:32:42Z + [[ 2025-12-10T13:32:42Z != \2\0\2\5\-\1\2\-\1\0\T\1\3\:\3\2\:\4\2\Z ]] ++ run_mongo 'new Date().toISOString()' myApp:myPass@some-name-rs0.pitr-13298 mongodb '' --quiet ++ grep -E -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match' ++ local 'command=new Date().toISOString()' ++ cut -c1-19 ++ local uri=myApp:myPass@some-name-rs0.pitr-13298 ++ local driver=mongodb ++ tr T ' ' ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.sGvKoSWUXG ++++ mktemp +++ local LAST_ERR=/tmp/tmp.iUSTgIal5n +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.sGvKoSWUXG +++ cat /tmp/tmp.iUSTgIal5n +++ rm /tmp/tmp.sGvKoSWUXG /tmp/tmp.iUSTgIal5n +++ return 0 ++ local client_container=psmdb-client-696897d69b-nrc64 ++ local mongo_flag=--quiet ++ [[ myApp:myPass@some-name-rs0.pitr-13298 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''new Date().toISOString()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PNJwjmdGgD +++ mktemp ++ local LAST_ERR=/tmp/tmp.G2TnE1sP6r ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''new Date().toISOString()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.PNJwjmdGgD ++ cat /tmp/tmp.G2TnE1sP6r ++ rm /tmp/tmp.PNJwjmdGgD /tmp/tmp.G2TnE1sP6r ++ return 0 + time_now='2025-12-10 13:33:57' + check_recovery backup-minio-2 date '2025-12-10 13:33:57' '' backupSource some-name + local backup_name=backup-minio-2 + local restore_type=date + local 'restore_date=2025-12-10 13:33:57' + local cmp_postfix= + local backupSource=backupSource + local cluster_name=some-name + local selective_collection= + local 
restore_name=restore-backup-minio-2 + local restore_file=restore.yml + local cluster + cluster=some-name-rs0 + desc 'write more data before restore by date' + set +o xtrace ----------------------------------------------------------------------------------- write more data before restore by date ----------------------------------------------------------------------------------- + sleep 60 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.pitr-13298 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.pitr-13298 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.l8mT736dHY +++ mktemp ++ local LAST_ERR=/tmp/tmp.7voqmqQDTz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.l8mT736dHY ++ cat /tmp/tmp.7voqmqQDTz ++ rm /tmp/tmp.l8mT736dHY /tmp/tmp.7voqmqQDTz ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.sozrqY01Oe ++ mktemp + local LAST_ERR=/tmp/tmp.NS0LdOJ1o3 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.sozrqY01Oe Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.pitr-13298.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.pitr-13298.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-13298.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("d06f9e27-9411-4350-99d8-4795ca2e70cd") } Percona Server for MongoDB server version: v8.0.16-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.NS0LdOJ1o3 + rm /tmp/tmp.sozrqY01Oe /tmp/tmp.NS0LdOJ1o3 + return 0 + '[' -n '' ']' + desc 'waiting for chunks to be uploaded' + set +o xtrace ----------------------------------------------------------------------------------- waiting for chunks to be uploaded ----------------------------------------------------------------------------------- + sleep 120 + desc 'check restore by date' + set +o xtrace ----------------------------------------------------------------------------------- check restore by date ----------------------------------------------------------------------------------- + '[' -z backupSource ']' ++ get_backup_dest backup-minio-2 ++ local backup_name=backup-minio-2 ++ kubectl_bin get psmdb-backup backup-minio-2 -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' +++ mktemp ++ sed 's|s3://||' ++ sed 's|azure://||' ++ sed 's|gs://||' ++ local 
LAST_OUT=/tmp/tmp.3fHCbJswFG +++ mktemp ++ local LAST_ERR=/tmp/tmp.BPWLYJTLyE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup backup-minio-2 -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3fHCbJswFG ++ cat /tmp/tmp.BPWLYJTLyE ++ rm /tmp/tmp.3fHCbJswFG /tmp/tmp.BPWLYJTLyE ++ return 0 + backup_dest=operator-testing/pitr-prefix-1/2025-12-10T13:33:29Z + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-minio-2/' + /usr/sbin/sed -e /backupName/d + /usr/sbin/sed -e 's/clusterName:/clusterName: some-name/' + /usr/sbin/sed -e 's/pitrType:/type: date/' + '[' -z '2025-12-10 13:33:57' ']' + /usr/sbin/sed -e 's/date:/date: 2025-12-10 13:33:57/' + kubectl_bin apply -f - + /usr/sbin/sed -e 's|BACKUP-NAME|operator-testing/pitr-prefix-1/2025-12-10T13:33:29Z|' + '[' -n '' ']' + yq ++ mktemp + local LAST_OUT=/tmp/tmp.kX24Ci3cu5 ++ mktemp + local LAST_ERR=/tmp/tmp.f6RgPXQN23 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.kX24Ci3cu5 perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-2 created + cat /tmp/tmp.f6RgPXQN23 + rm /tmp/tmp.kX24Ci3cu5 /tmp/tmp.f6RgPXQN23 + return 0 + wait_restore backup-minio-2 some-name + local backup_name=backup-minio-2 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-2 object to be created.OK Waiting psmdb-restore/restore-backup-minio-2 to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.He30nror26 +++ mktemp ++ local LAST_ERR=/tmp/tmp.oqEoFO9Bxi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.He30nror26 ++ cat /tmp/tmp.oqEoFO9Bxi ++ rm /tmp/tmp.He30nror26 /tmp/tmp.oqEoFO9Bxi ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + echo + set -o xtrace + '[' -n '' ']' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 '' + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:37:38+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 mongodb '' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local 'command=use myApp\n db.test.find()' + local 
uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' egrep: warning: egrep is obsolescent; using grep -E +++ mktemp ++ local LAST_OUT=/tmp/tmp.HnMAUdB2RG +++ mktemp ++ local LAST_ERR=/tmp/tmp.BeBoMeobmc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.HnMAUdB2RG ++ cat /tmp/tmp.BeBoMeobmc ++ rm /tmp/tmp.HnMAUdB2RG /tmp/tmp.BeBoMeobmc ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.0umsWmf3vv ++ mktemp + local LAST_ERR=/tmp/tmp.VY5dH8VXBP + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.0umsWmf3vv + cat /tmp/tmp.VY5dH8VXBP + rm /tmp/tmp.0umsWmf3vv /tmp/tmp.VY5dH8VXBP + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find.json /tmp/tmp.uUgrEfFz0e/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 '' + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:37:39+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.Z0q9fHppCs +++ mktemp ++ local LAST_ERR=/tmp/tmp.pmaei8scQp ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Z0q9fHppCs ++ cat /tmp/tmp.pmaei8scQp ++ rm /tmp/tmp.Z0q9fHppCs 
/tmp/tmp.pmaei8scQp ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.SRKo9a5Ryk ++ mktemp + local LAST_ERR=/tmp/tmp.Q8mQsGMpYv + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.SRKo9a5Ryk + cat /tmp/tmp.Q8mQsGMpYv + rm /tmp/tmp.SRKo9a5Ryk /tmp/tmp.Q8mQsGMpYv + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find.json /tmp/tmp.uUgrEfFz0e/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 '' + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:37:41+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.CNNR0M3XzB +++ mktemp ++ local LAST_ERR=/tmp/tmp.gAw5CvLmoc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.CNNR0M3XzB ++ cat /tmp/tmp.gAw5CvLmoc ++ rm /tmp/tmp.CNNR0M3XzB /tmp/tmp.gAw5CvLmoc ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.fAQSDIZ9bT ++ mktemp + local LAST_ERR=/tmp/tmp.iyLsWCBwGS + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo 
mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.fAQSDIZ9bT + cat /tmp/tmp.iyLsWCBwGS + rm /tmp/tmp.fAQSDIZ9bT /tmp/tmp.iyLsWCBwGS + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find.json /tmp/tmp.uUgrEfFz0e/find + run_backup backup-minio 3 + local name=backup-minio + local idx=3 + desc 'run backup backup-minio-3' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio-3 ----------------------------------------------------------------------------------- + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/conf/backup-minio.yml + /usr/sbin/sed -e 's/name:/name: backup-minio-3/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.Tru1dKgM8c ++ mktemp + local LAST_ERR=/tmp/tmp.BCUH476Ic5 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Tru1dKgM8c perconaservermongodbbackup.psmdb.percona.com/backup-minio-3 created + cat /tmp/tmp.BCUH476Ic5 + rm /tmp/tmp.Tru1dKgM8c /tmp/tmp.BCUH476Ic5 + return 0 + wait_backup backup-minio-3 + local backup_name=backup-minio-3 + local target_state=ready + set +o xtrace waiting for backup-minio-3 to reach ready state.......OK + compare_latest_restorable_time some-name-rs0 backup-minio-3 + local cluster=some-name-rs0 + local backup_name=backup-minio-3 + local latest_restorable_time + local backup_time ++ get_latest_restorable_time some-name-rs0 ++ local cluster=some-name-rs0 ++ local first_timestamp ++ local second_timestamp ++ local retry=0 ++ [[ '' != '' ]] +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.R5A0pPYypg ++++ mktemp +++ local LAST_ERR=/tmp/tmp.AHqmVYFeIc +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.R5A0pPYypg +++ cat /tmp/tmp.AHqmVYFeIc +++ rm /tmp/tmp.R5A0pPYypg /tmp/tmp.AHqmVYFeIc +++ return 0 ++ first_timestamp=1765373838 ++ sleep 5 ++ [[ 1765373838 != '' ]] ++ [[ 1765373838 != \n\u\l\l ]] +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json ++++ mktemp +++ local LAST_OUT=/tmp/tmp.82XVtuHCGZ ++++ mktemp +++ local LAST_ERR=/tmp/tmp.B7o17TQtGt +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.82XVtuHCGZ +++ cat /tmp/tmp.B7o17TQtGt +++ rm /tmp/tmp.82XVtuHCGZ /tmp/tmp.B7o17TQtGt +++ return 0 ++ second_timestamp=1765373838 ++ let retry+=1 ++ [[ 1 -gt 30 ]] ++ [[ 1765373838 != '' ]] ++ [[ 1765373838 != \n\u\l\l ]] ++ [[ 1765373838 == 1765373838 ]] ++ /usr/sbin/date -u -d @1765373838 +%Y-%m-%dT%H:%M:%SZ + latest_restorable_time=2025-12-10T13:37:18Z ++ get_latest_restorable_time_from_backup_object backup-minio-3 ++ local backup_name=backup-minio-3 ++ local latestRestorableTime ++ local retry=0 ++ [[ '' != '' ]] ++ sleep 5 +++ 
kubectl_bin get psmdb-backup backup-minio-3 -o 'jsonpath={.status.latestRestorableTime}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.34hz44TOAk ++++ mktemp +++ local LAST_ERR=/tmp/tmp.Wx0wvC1671 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get psmdb-backup backup-minio-3 -o 'jsonpath={.status.latestRestorableTime}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.34hz44TOAk +++ cat /tmp/tmp.Wx0wvC1671 +++ rm /tmp/tmp.34hz44TOAk /tmp/tmp.Wx0wvC1671 +++ return 0 ++ latestRestorableTime=2025-12-10T13:37:18Z ++ let retry+=1 ++ [[ 1 -gt 30 ]] ++ [[ 2025-12-10T13:37:18Z != '' ]] ++ [[ 2025-12-10T13:37:18Z != \n\u\l\l ]] ++ echo 2025-12-10T13:37:18Z + backup_time=2025-12-10T13:37:18Z + [[ 2025-12-10T13:37:18Z != \2\0\2\5\-\1\2\-\1\0\T\1\3\:\3\7\:\1\8\Z ]] + check_recovery backup-minio-3 latest '' -4th backupSource some-name + local backup_name=backup-minio-3 + local restore_type=latest + local restore_date= + local cmp_postfix=-4th + local backupSource=backupSource + local cluster_name=some-name + local selective_collection= + local restore_name=restore-backup-minio-3 + local restore_file=restore.yml + local cluster + cluster=some-name-rs0 + desc 'write more data before restore by latest' + set +o xtrace ----------------------------------------------------------------------------------- write more data before restore by latest ----------------------------------------------------------------------------------- + sleep 60 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.pitr-13298 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.pitr-13298 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EEsQHbCqQx +++ mktemp ++ local LAST_ERR=/tmp/tmp.qTy0i1jGJX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.EEsQHbCqQx ++ cat /tmp/tmp.qTy0i1jGJX ++ rm /tmp/tmp.EEsQHbCqQx /tmp/tmp.qTy0i1jGJX ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.SHwHOthY2K ++ mktemp + local LAST_ERR=/tmp/tmp.NcX0P60l88 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.SHwHOthY2K Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.pitr-13298.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-13298.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.pitr-13298.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false 
Implicit session: session { "id" : UUID("10357b7a-35b5-4787-940f-51a0892f78b5") } Percona Server for MongoDB server version: v8.0.16-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.NcX0P60l88 + rm /tmp/tmp.SHwHOthY2K /tmp/tmp.NcX0P60l88 + return 0 + '[' -n '' ']' + desc 'waiting for chunks to be uploaded' + set +o xtrace ----------------------------------------------------------------------------------- waiting for chunks to be uploaded ----------------------------------------------------------------------------------- + sleep 120 + desc 'check restore by latest' + set +o xtrace ----------------------------------------------------------------------------------- check restore by latest ----------------------------------------------------------------------------------- + '[' -z backupSource ']' ++ get_backup_dest backup-minio-3 ++ local backup_name=backup-minio-3 ++ kubectl_bin get psmdb-backup backup-minio-3 -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' ++ sed 's|s3://||' ++ sed 's|azure://||' +++ mktemp ++ sed 's|gs://||' ++ local LAST_OUT=/tmp/tmp.hzBUPQTrmw +++ mktemp ++ local LAST_ERR=/tmp/tmp.sdTEauTmtb ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup backup-minio-3 -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hzBUPQTrmw ++ cat /tmp/tmp.sdTEauTmtb ++ rm /tmp/tmp.hzBUPQTrmw /tmp/tmp.sdTEauTmtb ++ return 0 + backup_dest=operator-testing/pitr-prefix-1/2025-12-10T13:37:46Z + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-minio-3/' + /usr/sbin/sed -e /backupName/d + /usr/sbin/sed -e 's/clusterName:/clusterName: some-name/' + /usr/sbin/sed -e 's/pitrType:/type: latest/' + '[' -z '' ']' + /usr/sbin/sed -e /date:/d + /usr/sbin/sed -e 's|BACKUP-NAME|operator-testing/pitr-prefix-1/2025-12-10T13:37:46Z|' + '[' -n '' ']' + yq + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.6FFtiVNv2p ++ mktemp + local LAST_ERR=/tmp/tmp.EjdwxAQVO1 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6FFtiVNv2p perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-3 created + cat /tmp/tmp.EjdwxAQVO1 + rm /tmp/tmp.6FFtiVNv2p /tmp/tmp.EjdwxAQVO1 + return 0 + wait_restore backup-minio-3 some-name + local backup_name=backup-minio-3 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-3 object to be created.OK Waiting psmdb-restore/restore-backup-minio-3 to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.54Gf6iyQ19 +++ mktemp ++ local LAST_ERR=/tmp/tmp.HsLoD6PgdV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.54Gf6iyQ19 ++ cat /tmp/tmp.HsLoD6PgdV ++ rm 
/tmp/tmp.54Gf6iyQ19 /tmp/tmp.HsLoD6PgdV ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + echo + set -o xtrace + '[' -n '' ']' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 -4th + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 + local postfix=-4th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:41:47+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local driver=mongodb + local suffix=.svc.cluster.local + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' egrep: warning: egrep is obsolescent; using grep -E +++ mktemp ++ local LAST_OUT=/tmp/tmp.yuP1uMrrUO +++ mktemp ++ local LAST_ERR=/tmp/tmp.LsMj2qTzdM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.yuP1uMrrUO ++ cat /tmp/tmp.LsMj2qTzdM ++ rm /tmp/tmp.yuP1uMrrUO /tmp/tmp.LsMj2qTzdM ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.XPvbprmZOg ++ mktemp + local LAST_ERR=/tmp/tmp.Vkn4ryhm5f + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.XPvbprmZOg + cat /tmp/tmp.Vkn4ryhm5f + rm /tmp/tmp.XPvbprmZOg /tmp/tmp.Vkn4ryhm5f + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.uUgrEfFz0e/find-4th + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 -4th + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 + local postfix=-4th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:41:50+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 mongodb '' + 
local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.3NcXLH6MY0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.gwCzLzXmuT ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3NcXLH6MY0 ++ cat /tmp/tmp.gwCzLzXmuT ++ rm /tmp/tmp.3NcXLH6MY0 /tmp/tmp.gwCzLzXmuT ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.8uFXlS8yRr ++ mktemp + local LAST_ERR=/tmp/tmp.DHSWts9jYI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.8uFXlS8yRr + cat /tmp/tmp.DHSWts9jYI + rm /tmp/tmp.8uFXlS8yRr /tmp/tmp.DHSWts9jYI + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.uUgrEfFz0e/find-4th + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 -4th + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 + local postfix=-4th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:41:52+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 mongodb '' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' egrep: warning: egrep is obsolescent; using grep -E +++ mktemp ++ local LAST_OUT=/tmp/tmp.kGfKIxC9yg +++ mktemp ++ local LAST_ERR=/tmp/tmp.f9h3CAc6P2 ++ local 
exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.kGfKIxC9yg ++ cat /tmp/tmp.f9h3CAc6P2 ++ rm /tmp/tmp.kGfKIxC9yg /tmp/tmp.f9h3CAc6P2 ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.VrmQFWfwrf ++ mktemp + local LAST_ERR=/tmp/tmp.BQNvCExFGE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.VrmQFWfwrf + cat /tmp/tmp.BQNvCExFGE + rm /tmp/tmp.VrmQFWfwrf /tmp/tmp.BQNvCExFGE + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.uUgrEfFz0e/find-4th + desc 'restore pitr using backupSource on second cluster with another prefix in storage' + set +o xtrace ----------------------------------------------------------------------------------- restore pitr using backupSource on second cluster with another prefix in storage ----------------------------------------------------------------------------------- + second_cluster=cluster2-rs0 + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/conf/cluster2-rs0.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/conf/cluster2-rs0.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/conf/cluster2-rs0.yml ++ mktemp + yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod8.0"' + yq eval '(.spec | select(has("pmm"))).pmm.image = "percona/pmm-client:2.44.1-1"' + local LAST_OUT=/tmp/tmp.TdgQe7oMXf + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-2125-8ebcb80f"' ++ mktemp + yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' + yq eval '.spec.upgradeOptions.apply="Never"' + local LAST_ERR=/tmp/tmp.tBzNCnIQmI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.TdgQe7oMXf perconaservermongodb.psmdb.percona.com/cluster2 created + cat /tmp/tmp.tBzNCnIQmI + rm /tmp/tmp.TdgQe7oMXf /tmp/tmp.tBzNCnIQmI + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- + wait_for_running cluster2-rs0 3 + local name=cluster2-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=cluster2 ++ seq 0 2 + for i in 
$(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod cluster2-rs0-0 + local pod=cluster2-rs0-0 + set +o xtrace waiting for pod/cluster2-rs0-0 to be ready.......OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod cluster2-rs0-1 + local pod=cluster2-rs0-1 + set +o xtrace waiting for pod/cluster2-rs0-1 to be ready....OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb cluster2 -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.t1agyQQD08 +++ mktemp ++ local LAST_ERR=/tmp/tmp.mvNAgsNpdg ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb cluster2 -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.t1agyQQD08 ++ cat /tmp/tmp.mvNAgsNpdg ++ rm /tmp/tmp.t1agyQQD08 /tmp/tmp.mvNAgsNpdg ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod cluster2-rs0-2 + local pod=cluster2-rs0-2 + set +o xtrace waiting for pod/cluster2-rs0-2 to be ready....OK ++ kubectl_bin get psmdb cluster2 -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.b3JCDoaUrI +++ mktemp ++ local LAST_ERR=/tmp/tmp.N21nCCPZAK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb cluster2 -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.b3JCDoaUrI ++ cat /tmp/tmp.N21nCCPZAK ++ rm /tmp/tmp.b3JCDoaUrI /tmp/tmp.N21nCCPZAK ++ return 0 + [[ '' == \t\r\u\e ]] ++ kubectl_bin get psmdb cluster2 -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dLfFqQijvO +++ mktemp ++ local LAST_ERR=/tmp/tmp.pP1nn8bQGx ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb cluster2 -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dLfFqQijvO ++ cat /tmp/tmp.pP1nn8bQGx ++ rm /tmp/tmp.dLfFqQijvO /tmp/tmp.pP1nn8bQGx ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness..... 
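The next block restores the first cluster's PITR data onto the freshly built cluster2, which has no psmdb-backup object of its own: check_recovery deletes the backupName line from the restore template and injects the raw storage destination as a backupSource instead. Laid out readably, the manifest build that appears further down is roughly the following (a sketch with the names and paths from this run; the separate /usr/sbin/sed processes are folded into one invocation):

    # Strip the scheme from the first cluster's backup destination...
    backup_dest=$(kubectl get psmdb-backup backup-minio-3 -o jsonpath='{.status.destination}' |
        sed -e 's/.json$//' -e 's|s3://||' -e 's|azure://||' -e 's|gs://||')
    # -> operator-testing/pitr-prefix-1/2025-12-10T13:37:46Z

    # ...and aim a type=latest restore at cluster2, sourced directly from storage.
    cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/conf/restore2.yml |
        sed -e 's/name:/name: restore-backup-minio-3-second-cluster/' \
            -e '/backupName/d' \
            -e 's/clusterName:/clusterName: cluster2/' \
            -e 's/pitrType:/type: latest/' \
            -e '/date:/d' \
            -e "s|BACKUP-NAME|${backup_dest}|" |
        yq |                  # plain pass-through, as in the trace
        kubectl apply -f -

The desc line below notes that cluster2 keeps its own, different prefix in storage; passing the first cluster's recorded destination explicitly is what lets its PBM find the base backup under pitr-prefix-1 anyway and replay the oplog chunks to the latest point.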
+ wait_backup_agent cluster2-rs0-0 + local agent_pod=cluster2-rs0-0 + set +o xtrace waiting for pbm-agent to be ready in cluster2-rs0-0...2025-12-10T13:43:16.000+0000 I listening for the commands + wait_backup_agent cluster2-rs0-1 + local agent_pod=cluster2-rs0-1 + set +o xtrace waiting for pbm-agent to be ready in cluster2-rs0-1...2025-12-10T13:43:22.000+0000 I listening for the commands + wait_backup_agent cluster2-rs0-2 + local agent_pod=cluster2-rs0-2 + set +o xtrace waiting for pbm-agent to be ready in cluster2-rs0-2...2025-12-10T13:43:25.000+0000 I listening for the commands + desc 'create user myApp' + set +o xtrace ----------------------------------------------------------------------------------- create user myApp ----------------------------------------------------------------------------------- + run_mongo 'db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' userAdmin:userAdmin123456@cluster2-rs0.pitr-13298 + local 'command=db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' + local uri=userAdmin:userAdmin123456@cluster2-rs0.pitr-13298 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.brtE4ZDKqo +++ mktemp ++ local LAST_ERR=/tmp/tmp.oD6ZguROBq ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.brtE4ZDKqo ++ cat /tmp/tmp.oD6ZguROBq ++ rm /tmp/tmp.brtE4ZDKqo /tmp/tmp.oD6ZguROBq ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ userAdmin:userAdmin123456@cluster2-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.U4NUjCdgDg ++ mktemp + local LAST_ERR=/tmp/tmp.8TqD7XhcHK + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.U4NUjCdgDg Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://cluster2-rs0-2.cluster2-rs0.pitr-13298.svc.cluster.local:27017,cluster2-rs0-0.cluster2-rs0.pitr-13298.svc.cluster.local:27017,cluster2-rs0-1.cluster2-rs0.pitr-13298.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("dc14cb76-e94c-4c2b-b971-7da096969631") } Percona Server for MongoDB server version: v8.0.16-5 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] } bye + cat /tmp/tmp.8TqD7XhcHK + rm /tmp/tmp.U4NUjCdgDg /tmp/tmp.8TqD7XhcHK + return 0 + sleep 2 + check_recovery backup-minio-3 latest '' -4th backupSource cluster2 '' backup-minio-3-second-cluster restore2.yml + 
local backup_name=backup-minio-3 + local restore_type=latest + local restore_date= + local cmp_postfix=-4th + local backupSource=backupSource + local cluster_name=cluster2 + local selective_collection= + local restore_name=restore-backup-minio-3-second-cluster + local restore_file=restore2.yml + local cluster + cluster=cluster2-rs0 + desc 'write more data before restore by latest' + set +o xtrace ----------------------------------------------------------------------------------- write more data before restore by latest ----------------------------------------------------------------------------------- + sleep 60 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@cluster2-rs0.pitr-13298 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@cluster2-rs0.pitr-13298 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PkYMY9vI4V +++ mktemp ++ local LAST_ERR=/tmp/tmp.WRDwoq7UuH ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.PkYMY9vI4V ++ cat /tmp/tmp.WRDwoq7UuH ++ rm /tmp/tmp.PkYMY9vI4V /tmp/tmp.WRDwoq7UuH ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@cluster2-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Dl9kWthAqO ++ mktemp + local LAST_ERR=/tmp/tmp.NKER9rIT4X + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Dl9kWthAqO Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://cluster2-rs0-0.cluster2-rs0.pitr-13298.svc.cluster.local:27017,cluster2-rs0-1.cluster2-rs0.pitr-13298.svc.cluster.local:27017,cluster2-rs0-2.cluster2-rs0.pitr-13298.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("7a0db2b8-86fb-4fcf-9c0e-d1773dd9ea87") } Percona Server for MongoDB server version: v8.0.16-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.NKER9rIT4X + rm /tmp/tmp.Dl9kWthAqO /tmp/tmp.NKER9rIT4X + return 0 + '[' -n '' ']' + desc 'waiting for chunks to be uploaded' + set +o xtrace ----------------------------------------------------------------------------------- waiting for chunks to be uploaded ----------------------------------------------------------------------------------- + sleep 120 + desc 'check restore by latest' + set +o xtrace ----------------------------------------------------------------------------------- check restore by latest ----------------------------------------------------------------------------------- + '[' -z backupSource ']' ++ get_backup_dest backup-minio-3 ++ 
local backup_name=backup-minio-3 ++ kubectl_bin get psmdb-backup backup-minio-3 -o 'jsonpath={.status.destination}' ++ sed 's|s3://||' ++ sed -e 's/.json$//' ++ sed 's|azure://||' +++ mktemp ++ sed 's|gs://||' ++ local LAST_OUT=/tmp/tmp.ZWMftaCbc4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.v5yu12Ntbn ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup backup-minio-3 -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ZWMftaCbc4 ++ cat /tmp/tmp.v5yu12Ntbn ++ rm /tmp/tmp.ZWMftaCbc4 /tmp/tmp.v5yu12Ntbn ++ return 0 + backup_dest=operator-testing/pitr-prefix-1/2025-12-10T13:37:46Z + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/conf/restore2.yml + /usr/sbin/sed -e /backupName/d + /usr/sbin/sed -e 's/name:/name: restore-backup-minio-3-second-cluster/' + /usr/sbin/sed -e 's/clusterName:/clusterName: cluster2/' + /usr/sbin/sed -e 's/pitrType:/type: latest/' + '[' -z '' ']' + /usr/sbin/sed -e /date:/d + /usr/sbin/sed -e 's|BACKUP-NAME|operator-testing/pitr-prefix-1/2025-12-10T13:37:46Z|' + '[' -n '' ']' + yq + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.O8JPAlz9YT ++ mktemp + local LAST_ERR=/tmp/tmp.to5TQQQWse + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.O8JPAlz9YT perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-3-second-cluster created + cat /tmp/tmp.to5TQQQWse + rm /tmp/tmp.O8JPAlz9YT /tmp/tmp.to5TQQQWse + return 0 + wait_restore backup-minio-3-second-cluster cluster2 + local backup_name=backup-minio-3-second-cluster + local cluster_name=cluster2 + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-3-second-cluster object to be created.OK Waiting psmdb-restore/restore-backup-minio-3-second-cluster to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency cluster2 + local cluster_name=cluster2 + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb cluster2 -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SeVHFDilvb +++ mktemp ++ local LAST_ERR=/tmp/tmp.DvUayeQPQe ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb cluster2 -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.SeVHFDilvb ++ cat /tmp/tmp.DvUayeQPQe ++ rm /tmp/tmp.SeVHFDilvb /tmp/tmp.DvUayeQPQe ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + echo + set -o xtrace + '[' -n '' ']' + compare_mongo_cmd find myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-13298 -4th + local command=find + local uri=myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-13298 + local postfix=-4th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:47:03+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-13298 mongodb '' + local 'command=use myApp\n db.test.find()' + local 
uri=myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-13298 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.XYVRmdl3Qz +++ mktemp ++ local LAST_ERR=/tmp/tmp.DFl336XEPs ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.XYVRmdl3Qz ++ cat /tmp/tmp.DFl336XEPs ++ rm /tmp/tmp.XYVRmdl3Qz /tmp/tmp.DFl336XEPs ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.yTVyQ0iru2 ++ mktemp + local LAST_ERR=/tmp/tmp.PsdXrPvS3R + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.yTVyQ0iru2 + cat /tmp/tmp.PsdXrPvS3R + rm /tmp/tmp.yTVyQ0iru2 /tmp/tmp.PsdXrPvS3R + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.uUgrEfFz0e/find-4th + compare_mongo_cmd find myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-13298 -4th + local command=find + local uri=myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-13298 + local postfix=-4th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:47:05+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-13298 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-13298 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.2qeANwI8LE +++ mktemp ++ local LAST_ERR=/tmp/tmp.jXIqwdvdgU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) 
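Every find comparison in these loops funnels the mongo output through the same scrubbers before diffing against the golden file, so ObjectIds and per-pod service ordinals never produce false mismatches. The chain from the trace, spelled out as one function (a sketch; the pod and scratch-dir names are the ones from this run, and one redundant pattern, 'Error saving history file:', is dropped because the unanchored 'Error saving history file' already matches it):

    run_find() {
        local uri=$1 postfix=$2
        kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c \
            "printf 'use myApp\n db.test.find()\n' | mongo mongodb://${uri}.svc.cluster.local/admin?ssl=false\&replicaSet=rs0" |
          grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Started a new thread for the timer service' |
          sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' \
          > "/tmp/tmp.uUgrEfFz0e/find${postfix}"
        diff -u "/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find${postfix}.json" \
                "/tmp/tmp.uUgrEfFz0e/find${postfix}"
    }
    run_find myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-13298 -4th

Spelling the filter grep -E, as above, would also silence the 'egrep: warning: egrep is obsolescent' lines sprinkled through this log.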
++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.2qeANwI8LE ++ cat /tmp/tmp.jXIqwdvdgU ++ rm /tmp/tmp.2qeANwI8LE /tmp/tmp.jXIqwdvdgU ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.93zb3sIJYf ++ mktemp + local LAST_ERR=/tmp/tmp.zLR5AN9Q3Y + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.93zb3sIJYf + cat /tmp/tmp.zLR5AN9Q3Y + rm /tmp/tmp.93zb3sIJYf /tmp/tmp.zLR5AN9Q3Y + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.uUgrEfFz0e/find-4th + compare_mongo_cmd find myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-13298 -4th + local command=find + local uri=myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-13298 + local postfix=-4th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-12-10T13:47:07+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-13298 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-13298 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WOdqxRhjbs egrep: warning: egrep is obsolescent; using grep -E +++ mktemp ++ local LAST_ERR=/tmp/tmp.gvX9LbdFAo ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.WOdqxRhjbs ++ cat /tmp/tmp.gvX9LbdFAo ++ rm /tmp/tmp.WOdqxRhjbs /tmp/tmp.gvX9LbdFAo ++ return 0 + local client_container=psmdb-client-696897d69b-nrc64 + local mongo_flag= + [[ myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-13298 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.xGRtu5QKHz ++ mktemp + local LAST_ERR=/tmp/tmp.7oH8bqm0p5 + 
local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.xGRtu5QKHz + cat /tmp/tmp.7oH8bqm0p5 + rm /tmp/tmp.xGRtu5QKHz /tmp/tmp.7oH8bqm0p5 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.uUgrEfFz0e/find-4th + backup_name_minio_2=backup-minio2 + run_backup backup-minio2 0 + local name=backup-minio2 + local idx=0 + desc 'run backup backup-minio2-0' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio2-0 ----------------------------------------------------------------------------------- + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/conf/backup-minio2.yml + /usr/sbin/sed -e 's/name:/name: backup-minio2-0/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.gFhlp3t2Xr ++ mktemp + local LAST_ERR=/tmp/tmp.AmwRb7YmZB + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.gFhlp3t2Xr perconaservermongodbbackup.psmdb.percona.com/backup-minio2-0 created + cat /tmp/tmp.AmwRb7YmZB + rm /tmp/tmp.gFhlp3t2Xr /tmp/tmp.AmwRb7YmZB + return 0 + wait_backup backup-minio2-0 + local backup_name=backup-minio2-0 + local target_state=ready + set +o xtrace waiting for backup-minio2-0 to reach ready state.......OK ++ run_mongo 'new Date().toISOString()' myApp:myPass@cluster2-rs0.pitr-13298 mongodb '' --quiet ++ local 'command=new Date().toISOString()' ++ local uri=myApp:myPass@cluster2-rs0.pitr-13298 ++ grep -E -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match' ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ cut -c1-19 +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ tr T ' ' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.VCX56VjeN9 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.eBLrFt0Kyn +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.VCX56VjeN9 +++ cat /tmp/tmp.eBLrFt0Kyn +++ rm /tmp/tmp.VCX56VjeN9 /tmp/tmp.eBLrFt0Kyn +++ return 0 ++ local client_container=psmdb-client-696897d69b-nrc64 ++ local mongo_flag=--quiet ++ [[ myApp:myPass@cluster2-rs0.pitr-13298 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''new Date().toISOString()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wAp6r66BwT +++ mktemp ++ local LAST_ERR=/tmp/tmp.O9jq2CdsFs ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''new Date().toISOString()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e 
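The time_now capture above is worth calling out: the restore-by-date target is read from the cluster itself (new Date().toISOString() evaluated in the mongo shell), not from the CI host, so clock skew between the Jenkins runner and the pods cannot shift the target point. A standalone sketch of the same idea; the user:pass@host connection string here is illustrative, not taken from this log:

    # Take the PITR target from the server clock and convert
    # 2025-12-10T13:47:25.000Z -> "2025-12-10 13:47:25" (the format check_recovery expects).
    time_now=$(printf 'new Date().toISOString()\n' \
        | mongo "mongodb://user:pass@host/admin?ssl=false&replicaSet=rs0" --quiet \
        | cut -c1-19 | tr T ' ')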
+ write_document -5th cluster2-rs0
+ local cmp_postfix=-5th
+ local cluster_name=cluster2-rs0
+ desc 'write initial data, read from all'
+ set +o xtrace
-----------------------------------------------------------------------------------
write initial data, read from all
-----------------------------------------------------------------------------------
+ run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@cluster2-rs0.pitr-13298
+ local 'command=use myApp\n db.test.insert({ x: 100500 })'
+ local uri=myApp:myPass@cluster2-rs0.pitr-13298
+ local driver=mongodb+srv
+ local suffix=.svc.cluster.local
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.mr1NL8bRHB
+++ mktemp
++ local LAST_ERR=/tmp/tmp.afRV0Hjs9i
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.mr1NL8bRHB
++ cat /tmp/tmp.afRV0Hjs9i
++ rm /tmp/tmp.mr1NL8bRHB /tmp/tmp.afRV0Hjs9i
++ return 0
+ local client_container=psmdb-client-696897d69b-nrc64
+ local mongo_flag=
+ [[ myApp:myPass@cluster2-rs0.pitr-13298 == *cfg* ]]
+ replica_set=rs0
+ kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
++ mktemp
+ local LAST_OUT=/tmp/tmp.J1HQwMjfcl
++ mktemp
+ local LAST_ERR=/tmp/tmp.UzcstbiP9a
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.J1HQwMjfcl
Percona Server for MongoDB shell version v4.4.29-28
connecting to: mongodb://cluster2-rs0-0.cluster2-rs0.pitr-13298.svc.cluster.local:27017,cluster2-rs0-1.cluster2-rs0.pitr-13298.svc.cluster.local:27017,cluster2-rs0-2.cluster2-rs0.pitr-13298.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false
Implicit session: session { "id" : UUID("d258721a-0278-48dc-ac37-f99257d1e3c9") }
Percona Server for MongoDB server version: v8.0.16-5
WARNING: shell and server versions do not match
switched to db myApp
WriteResult({ "nInserted" : 1 })
bye
+ cat /tmp/tmp.UzcstbiP9a
+ rm /tmp/tmp.J1HQwMjfcl /tmp/tmp.UzcstbiP9a
+ return 0
+ minikube_sleep
+ sleep_time=10
+ [[ '' == 1 ]]
+ compare_mongo_cmd find myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-13298 -5th
+ local command=find
+ local uri=myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-13298
+ local postfix=-5th
+ local suffix=
+ local database=myApp
+ local collection=test
+ local sort=
+ local tls=false
+ local 'full_command=db.test.find()'
+ [[ -n '' ]]
+ log 'running db.test.find() in myApp'
+ set +o xtrace
[2025-12-10T13:47:28+0000] running db.test.find() in myApp
+ [[ false == \t\r\u\e ]]
+ mongo_command=run_mongo
+ run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-13298 mongodb ''
+ local 'command=use myApp\n db.test.find()'
+ egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service'
+ local uri=myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-13298
+ local driver=mongodb
+ local suffix=.svc.cluster.local
+ /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.oUIgLSinOW
egrep: warning: egrep is obsolescent; using grep -E
+++ mktemp
++ local LAST_ERR=/tmp/tmp.faQQ1uNlj2
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.oUIgLSinOW
++ cat /tmp/tmp.faQQ1uNlj2
++ rm /tmp/tmp.oUIgLSinOW /tmp/tmp.faQQ1uNlj2
++ return 0
+ local client_container=psmdb-client-696897d69b-nrc64
+ local mongo_flag=
+ [[ myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-13298 == *cfg* ]]
+ replica_set=rs0
+ kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
++ mktemp
+ local LAST_OUT=/tmp/tmp.eu2S7E9xHm
++ mktemp
+ local LAST_ERR=/tmp/tmp.NDQZJrT4xA
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.eu2S7E9xHm
+ cat /tmp/tmp.NDQZJrT4xA
+ rm /tmp/tmp.eu2S7E9xHm /tmp/tmp.NDQZJrT4xA
+ return 0
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find-5th.json /tmp/tmp.uUgrEfFz0e/find-5th
+ compare_mongo_cmd find myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-13298 -5th
+ local command=find
+ local uri=myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-13298
+ local postfix=-5th
+ local suffix=
+ local database=myApp
+ local collection=test
+ local sort=
+ local tls=false
+ local 'full_command=db.test.find()'
+ [[ -n '' ]]
+ log 'running db.test.find() in myApp'
+ set +o xtrace
[2025-12-10T13:47:30+0000] running db.test.find() in myApp
+ [[ false == \t\r\u\e ]]
+ mongo_command=run_mongo
+ run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-13298 mongodb ''
+ local 'command=use myApp\n db.test.find()'
+ local uri=myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-13298
+ local driver=mongodb
+ local suffix=.svc.cluster.local
+ egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service'
+ /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
egrep: warning: egrep is obsolescent; using grep -E
+++ mktemp
++ local LAST_OUT=/tmp/tmp.2jD0Eg6vcs
+++ mktemp
++ local LAST_ERR=/tmp/tmp.ielO0yXOzp
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.2jD0Eg6vcs
++ cat /tmp/tmp.ielO0yXOzp
++ rm /tmp/tmp.2jD0Eg6vcs /tmp/tmp.ielO0yXOzp
++ return 0
+ local client_container=psmdb-client-696897d69b-nrc64
+ local mongo_flag=
+ [[ myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-13298 == *cfg* ]]
+ replica_set=rs0
+ kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
++ mktemp
+ local LAST_OUT=/tmp/tmp.TlHrfXe5Gu
++ mktemp
+ local LAST_ERR=/tmp/tmp.medbfXUCKg
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.TlHrfXe5Gu
+ cat /tmp/tmp.medbfXUCKg
+ rm /tmp/tmp.TlHrfXe5Gu /tmp/tmp.medbfXUCKg
+ return 0
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find-5th.json /tmp/tmp.uUgrEfFz0e/find-5th
+ compare_mongo_cmd find myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-13298 -5th
+ local command=find
+ local uri=myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-13298
+ local postfix=-5th
+ local suffix=
+ local database=myApp
+ local collection=test
+ local sort=
+ local tls=false
+ local 'full_command=db.test.find()'
+ [[ -n '' ]]
+ log 'running db.test.find() in myApp'
+ set +o xtrace
[2025-12-10T13:47:32+0000] running db.test.find() in myApp
+ [[ false == \t\r\u\e ]]
+ mongo_command=run_mongo
+ run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-13298 mongodb ''
+ egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service'
+ local 'command=use myApp\n db.test.find()'
+ local uri=myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-13298
+ local driver=mongodb
+ local suffix=.svc.cluster.local
+ /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
egrep: warning: egrep is obsolescent; using grep -E
+++ mktemp
++ local LAST_OUT=/tmp/tmp.KIAOmJleXk
+++ mktemp
++ local LAST_ERR=/tmp/tmp.XI429FRbe0
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.KIAOmJleXk
++ cat /tmp/tmp.XI429FRbe0
++ rm /tmp/tmp.KIAOmJleXk /tmp/tmp.XI429FRbe0
++ return 0
+ local client_container=psmdb-client-696897d69b-nrc64
+ local mongo_flag=
+ [[ myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-13298 == *cfg* ]]
+ replica_set=rs0
+ kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
++ mktemp
+ local LAST_OUT=/tmp/tmp.o9mQ27c2T5
++ mktemp
+ local LAST_ERR=/tmp/tmp.72soi5dbiL
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.o9mQ27c2T5
+ cat /tmp/tmp.72soi5dbiL
+ rm /tmp/tmp.o9mQ27c2T5 /tmp/tmp.72soi5dbiL
+ return 0
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find-5th.json /tmp/tmp.uUgrEfFz0e/find-5th
+ sleep 2
+ check_recovery backup-minio2-0 date '2025-12-10 13:47:25' -4th '' cluster2
+ local backup_name=backup-minio2-0
+ local restore_type=date
+ local 'restore_date=2025-12-10 13:47:25'
+ local cmp_postfix=-4th
+ local backupSource=
+ local cluster_name=cluster2
+ local selective_collection=
+ local restore_name=restore-backup-minio2-0
+ local restore_file=restore.yml
+ local cluster
+ cluster=cluster2-rs0
+ desc 'write more data before restore by date'
+ set +o xtrace
-----------------------------------------------------------------------------------
write more data before restore by date
-----------------------------------------------------------------------------------
+ sleep 60
+ run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@cluster2-rs0.pitr-13298
+ local 'command=use myApp\n db.test.insert({ x: 100501 })'
+ local uri=myApp:myPass@cluster2-rs0.pitr-13298
+ local driver=mongodb+srv
+ local suffix=.svc.cluster.local
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.CcALLehnYz
+++ mktemp
++ local LAST_ERR=/tmp/tmp.XLQQMzMQcy
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.CcALLehnYz
++ cat /tmp/tmp.XLQQMzMQcy
++ rm /tmp/tmp.CcALLehnYz /tmp/tmp.XLQQMzMQcy
++ return 0
+ local client_container=psmdb-client-696897d69b-nrc64
+ local mongo_flag=
+ [[ myApp:myPass@cluster2-rs0.pitr-13298 == *cfg* ]]
+ replica_set=rs0
+ kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
++ mktemp
+ local LAST_OUT=/tmp/tmp.gXe2J0eahH
++ mktemp
+ local LAST_ERR=/tmp/tmp.I7oHEQXC8J
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.gXe2J0eahH
Percona Server for MongoDB shell version v4.4.29-28
connecting to: mongodb://cluster2-rs0-0.cluster2-rs0.pitr-13298.svc.cluster.local:27017,cluster2-rs0-1.cluster2-rs0.pitr-13298.svc.cluster.local:27017,cluster2-rs0-2.cluster2-rs0.pitr-13298.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false
Implicit session: session { "id" : UUID("6d9725c6-5a53-4b11-bfa6-51ee98889386") }
Percona Server for MongoDB server version: v8.0.16-5
WARNING: shell and server versions do not match
switched to db myApp
WriteResult({ "nInserted" : 1 })
bye
+ cat /tmp/tmp.I7oHEQXC8J
+ rm /tmp/tmp.gXe2J0eahH /tmp/tmp.I7oHEQXC8J
+ return 0
+ '[' -n '' ']'
+ desc 'waiting for chunks to be uploaded'
+ set +o xtrace
-----------------------------------------------------------------------------------
waiting for chunks to be uploaded
-----------------------------------------------------------------------------------
+ sleep 120
+ desc 'check restore by date'
+ set +o xtrace
-----------------------------------------------------------------------------------
check restore by date
-----------------------------------------------------------------------------------
+ '[' -z '' ']'
+ cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/conf/restore.yml
+ /usr/sbin/sed -e 's/name:/name: restore-backup-minio2-0/'
+ /usr/sbin/sed -e 's/backupName:/backupName: backup-minio2-0/'
+ /usr/sbin/sed -e 's/clusterName:/clusterName: cluster2/'
+ /usr/sbin/sed -e 's/pitrType:/type: date/'
+ kubectl_bin apply -f -
+ '[' -n '' ']'
+ yq
++ mktemp
+ '[' -z '2025-12-10 13:47:25' ']'
+ /usr/sbin/sed -e 's/date:/date: 2025-12-10 13:47:25/'
+ local LAST_OUT=/tmp/tmp.V0bR7vBFmI
++ mktemp
+ /usr/sbin/sed -e /backupSource/,+2d
+ local LAST_ERR=/tmp/tmp.KFPd2QOTqC
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.V0bR7vBFmI
perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio2-0 created
+ cat /tmp/tmp.KFPd2QOTqC
+ rm /tmp/tmp.V0bR7vBFmI /tmp/tmp.KFPd2QOTqC
+ return 0
+ wait_restore backup-minio2-0 cluster2
+ local backup_name=backup-minio2-0
+ local cluster_name=cluster2
+ local target_state=ready
+ local wait_cluster_consistency=1
+ local wait_time=1780
+ local ok_if_ready=0
+ set +o xtrace
Waiting for the psmdb-restore/restore-backup-minio2-0 object to be created.OK
Waiting psmdb-restore/restore-backup-minio2-0 to reach state "ready" .OK after 0 minutes
+ [[ 1 -eq 1 ]]
+ wait_cluster_consistency cluster2
+ local cluster_name=cluster2
+ local wait_time=32
+ retry=0
+ sleep 7
+ echo -n 'waiting for cluster readyness'
waiting for cluster readyness++ kubectl_bin get psmdb cluster2 -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.vtQdBtY3Y7
+++ mktemp
++ local LAST_ERR=/tmp/tmp.NE76u7xiHK
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get psmdb cluster2 -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.vtQdBtY3Y7
++ cat /tmp/tmp.NE76u7xiHK
++ rm /tmp/tmp.vtQdBtY3Y7 /tmp/tmp.NE76u7xiHK
++ return 0
+ [[ ready == \r\e\a\d\y ]]
+ echo .OK
.OK
+ echo
+ set -o xtrace
+ '[' -n '' ']'
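For reference, the restore-by-date flow above fills e2e-tests/pitr/conf/restore.yml, sets the target time captured earlier in time_now, and deletes the backupSource block (sed -e /backupSource/,+2d), so this restore is driven by backupName plus a PITR date. The manifest that reaches kubectl apply should look roughly like this; the field layout is inferred from the substitutions, not copied from the template:

    apiVersion: psmdb.percona.com/v1
    kind: PerconaServerMongoDBRestore
    metadata:
      name: restore-backup-minio2-0
    spec:
      clusterName: cluster2
      backupName: backup-minio2-0
      pitr:
        type: date
        date: "2025-12-10 13:47:25"

The 60-second sleep before the extra insert and the 120-second "waiting for chunks" pause exist so that the x:100501 write lands after the PITR target and its oplog chunk is uploaded, which is what lets the -4th comparison below prove the restore stopped at the requested point.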
+ compare_mongo_cmd find myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-13298 -4th
+ local command=find
+ local uri=myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-13298
+ local postfix=-4th
+ local suffix=
+ local database=myApp
+ local collection=test
+ local sort=
+ local tls=false
+ local 'full_command=db.test.find()'
+ [[ -n '' ]]
+ log 'running db.test.find() in myApp'
+ set +o xtrace
[2025-12-10T13:51:11+0000] running db.test.find() in myApp
+ [[ false == \t\r\u\e ]]
+ mongo_command=run_mongo
+ run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-13298 mongodb ''
+ local 'command=use myApp\n db.test.find()'
+ local uri=myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-13298
+ local driver=mongodb
+ local suffix=.svc.cluster.local
+ egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service'
+ /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.cM8qDsTsdb
+++ mktemp
egrep: warning: egrep is obsolescent; using grep -E
++ local LAST_ERR=/tmp/tmp.5N8MqeQoL7
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.cM8qDsTsdb
++ cat /tmp/tmp.5N8MqeQoL7
++ rm /tmp/tmp.cM8qDsTsdb /tmp/tmp.5N8MqeQoL7
++ return 0
+ local client_container=psmdb-client-696897d69b-nrc64
+ local mongo_flag=
+ [[ myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-13298 == *cfg* ]]
+ replica_set=rs0
+ kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
++ mktemp
+ local LAST_OUT=/tmp/tmp.7nPFYTyxYr
++ mktemp
+ local LAST_ERR=/tmp/tmp.vb6gMANjbv
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.7nPFYTyxYr
+ cat /tmp/tmp.vb6gMANjbv
+ rm /tmp/tmp.7nPFYTyxYr /tmp/tmp.vb6gMANjbv
+ return 0
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.uUgrEfFz0e/find-4th
+ compare_mongo_cmd find myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-13298 -4th
+ local command=find
+ local uri=myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-13298
+ local postfix=-4th
+ local suffix=
+ local database=myApp
+ local collection=test
+ local sort=
+ local tls=false
+ local 'full_command=db.test.find()'
+ [[ -n '' ]]
+ log 'running db.test.find() in myApp'
+ set +o xtrace
[2025-12-10T13:51:13+0000] running db.test.find() in myApp
+ [[ false == \t\r\u\e ]]
+ mongo_command=run_mongo
+ run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-13298 mongodb ''
+ local 'command=use myApp\n db.test.find()'
+ local uri=myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-13298
+ egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service'
+ local driver=mongodb
+ local suffix=.svc.cluster.local
+ /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
egrep: warning: egrep is obsolescent; using grep -E
++ local LAST_OUT=/tmp/tmp.Cc4oSW0RGv
+++ mktemp
++ local LAST_ERR=/tmp/tmp.86JmqqktfT
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.Cc4oSW0RGv
++ cat /tmp/tmp.86JmqqktfT
++ rm /tmp/tmp.Cc4oSW0RGv /tmp/tmp.86JmqqktfT
++ return 0
+ local client_container=psmdb-client-696897d69b-nrc64
+ local mongo_flag=
+ [[ myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-13298 == *cfg* ]]
+ replica_set=rs0
+ kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
++ mktemp
+ local LAST_OUT=/tmp/tmp.BmygkChkE5
++ mktemp
+ local LAST_ERR=/tmp/tmp.lKEbYMGbnQ
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.BmygkChkE5
+ cat /tmp/tmp.lKEbYMGbnQ
+ rm /tmp/tmp.BmygkChkE5 /tmp/tmp.lKEbYMGbnQ
+ return 0
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.uUgrEfFz0e/find-4th
+ compare_mongo_cmd find myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-13298 -4th
+ local command=find
+ local uri=myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-13298
+ local postfix=-4th
+ local suffix=
+ local database=myApp
+ local collection=test
+ local sort=
+ local tls=false
+ local 'full_command=db.test.find()'
+ [[ -n '' ]]
+ log 'running db.test.find() in myApp'
+ set +o xtrace
[2025-12-10T13:51:16+0000] running db.test.find() in myApp
+ [[ false == \t\r\u\e ]]
+ mongo_command=run_mongo
+ run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-13298 mongodb ''
+ local 'command=use myApp\n db.test.find()'
+ local uri=myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-13298
+ local driver=mongodb
+ local suffix=.svc.cluster.local
+ /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
+ egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service'
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.GHlExwunXA
egrep: warning: egrep is obsolescent; using grep -E
+++ mktemp
++ local LAST_ERR=/tmp/tmp.JlQgamrgEG
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.GHlExwunXA
++ cat /tmp/tmp.JlQgamrgEG
++ rm /tmp/tmp.GHlExwunXA /tmp/tmp.JlQgamrgEG
++ return 0
+ local client_container=psmdb-client-696897d69b-nrc64
+ local mongo_flag=
+ [[ myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-13298 == *cfg* ]]
+ replica_set=rs0
+ kubectl_bin exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
++ mktemp
+ local LAST_OUT=/tmp/tmp.lK5j26nzDP
++ mktemp
+ local LAST_ERR=/tmp/tmp.wQ0RYWthAc
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl exec psmdb-client-696897d69b-nrc64 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-13298.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.lK5j26nzDP
+ cat /tmp/tmp.wQ0RYWthAc
+ rm /tmp/tmp.lK5j26nzDP /tmp/tmp.wQ0RYWthAc
+ return 0
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.uUgrEfFz0e/find-4th
+ kubectl patch psmdb some-name --type=merge --patch '{"spec": {"backup": {"pitr": {"enabled": false}}}}'
perconaservermongodb.psmdb.percona.com/some-name patched
+ kubectl patch psmdb cluster2 --type=merge --patch '{"spec": {"backup": {"pitr": {"enabled": false}}}}'
perconaservermongodb.psmdb.percona.com/cluster2 patched
+ sleep 20
+ kubectl_bin delete psmdb-backup --all
++ mktemp
+ local LAST_OUT=/tmp/tmp.NDxjXWoumr
++ mktemp
+ local LAST_ERR=/tmp/tmp.OWroqHnPnA
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete psmdb-backup --all
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.NDxjXWoumr
perconaservermongodbbackup.psmdb.percona.com "backup-minio-0" deleted from pitr-13298 namespace
perconaservermongodbbackup.psmdb.percona.com "backup-minio-1" deleted from pitr-13298 namespace
perconaservermongodbbackup.psmdb.percona.com "backup-minio-2" deleted from pitr-13298 namespace
perconaservermongodbbackup.psmdb.percona.com "backup-minio-3" deleted from pitr-13298 namespace
perconaservermongodbbackup.psmdb.percona.com "backup-minio2-0" deleted from pitr-13298 namespace
+ cat /tmp/tmp.OWroqHnPnA
+ rm /tmp/tmp.NDxjXWoumr /tmp/tmp.OWroqHnPnA
+ return 0
+ destroy pitr-13298
+ local namespace=pitr-13298
+ local ignore_logs=true
+ [[ 0 == 1 ]]
+ desc 'destroy cluster/operator and all other resources'
+ set +o xtrace
-----------------------------------------------------------------------------------
destroy cluster/operator and all other resources
-----------------------------------------------------------------------------------
+ '[' true == false ']'
+ delete_backups
+ desc 'Delete psmdb-backup'
+ set +o xtrace
-----------------------------------------------------------------------------------
Delete psmdb-backup
-----------------------------------------------------------------------------------
++ wc -l
++ kubectl_bin get psmdb-backup --no-headers
+++ mktemp
++ local LAST_OUT=/tmp/tmp.mx0N0evrUJ
+++ mktemp
++ local LAST_ERR=/tmp/tmp.pN6Fxkjqoc
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get psmdb-backup --no-headers
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.mx0N0evrUJ
++ cat /tmp/tmp.pN6Fxkjqoc
No resources found in pitr-13298 namespace.
++ rm /tmp/tmp.mx0N0evrUJ /tmp/tmp.pN6Fxkjqoc
++ return 0
+ '[' 0 '!=' 0 ']'
+ delete_crd
+ desc 'get and delete old CRDs and RBAC'
+ set +o xtrace
-----------------------------------------------------------------------------------
get and delete old CRDs and RBAC
-----------------------------------------------------------------------------------
+ kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/crd.yaml --ignore-not-found --wait=false
++ mktemp
+ local LAST_OUT=/tmp/tmp.ckJzqB8UkF
++ mktemp
+ local LAST_ERR=/tmp/tmp.nBr8rcXypT
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/crd.yaml --ignore-not-found --wait=false
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.ckJzqB8UkF
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted
+ cat /tmp/tmp.nBr8rcXypT
+ rm /tmp/tmp.ckJzqB8UkF /tmp/tmp.nBr8rcXypT
+ return 0
++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/crd.yaml
++ grep -v '\-\-\-'
grep: warning: stray \ before -
grep: warning: stray \ before -
+ for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-')
+ kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
error: the server doesn't have a resource type "perconaservermongodbbackups"
+ kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbbackups"
+ :
+ kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com
++ mktemp
+ local LAST_OUT=/tmp/tmp.HHYPcCKveh
++ mktemp
+ local LAST_ERR=/tmp/tmp.akhpHEScHI
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.HHYPcCKveh
+ cat /tmp/tmp.akhpHEScHI
+ rm /tmp/tmp.HHYPcCKveh /tmp/tmp.akhpHEScHI
+ return 0
+ for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-')
+ kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
error: the server doesn't have a resource type "perconaservermongodbrestores"
+ kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbrestores"
+ :
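A note on the xargs one-liner in this cleanup loop: its purpose is to strip finalizers from any leftover backup/restore objects so that CRD deletion cannot hang. Here it fails harmlessly, since the CRDs are already gone; the odd-looking "-n sh" invocation appears to be xargs running the sh -c script once with no input, so $0 falls back to the shell name "sh". When objects do exist, the per-row patch it generates would look roughly like this (namespace and object name are illustrative, not from this run):

    kubectl patch perconaservermongodbbackups.psmdb.percona.com \
        -n pitr-13298 backup-minio-0 \
        --type=merge -p '{"metadata":{"finalizers":[]}}'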
+ kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com
++ mktemp
+ local LAST_OUT=/tmp/tmp.TA86Y1aDSY
++ mktemp
+ local LAST_ERR=/tmp/tmp.Dj30sHj6aM
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.TA86Y1aDSY
+ cat /tmp/tmp.Dj30sHj6aM
+ rm /tmp/tmp.TA86Y1aDSY /tmp/tmp.Dj30sHj6aM
+ return 0
+ for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-')
+ kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
No resources found
+ kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: resource(s) were provided, but no name was specified
+ :
+ kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com
++ mktemp
+ local LAST_OUT=/tmp/tmp.wtIEdSFLwE
++ mktemp
+ local LAST_ERR=/tmp/tmp.FUyLcttBU5
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.wtIEdSFLwE
customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com condition met
+ cat /tmp/tmp.FUyLcttBU5
+ rm /tmp/tmp.wtIEdSFLwE /tmp/tmp.FUyLcttBU5
+ return 0
+ local rbac_yaml=rbac.yaml
+ '[' -n psmdb-operator ']'
+ rbac_yaml=cw-rbac.yaml
+ kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/cw-rbac.yaml --ignore-not-found
++ mktemp
+ local LAST_OUT=/tmp/tmp.59jwUmYigR
++ mktemp
+ local LAST_ERR=/tmp/tmp.cyDyFrEZHs
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2125/deploy/cw-rbac.yaml --ignore-not-found
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.59jwUmYigR
clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted
clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted
+ cat /tmp/tmp.cyDyFrEZHs
+ rm /tmp/tmp.59jwUmYigR /tmp/tmp.cyDyFrEZHs
+ return 0
+ destroy_cert_manager
+ kubectl_bin delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml
++ mktemp
+ local LAST_OUT=/tmp/tmp.ZZfx7SAauG
++ mktemp
+ local LAST_ERR=/tmp/tmp.85hoYZGOxw
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 -a -n 1 ']'
+ cat /tmp/tmp.ZZfx7SAauG
+ cat /tmp/tmp.85hoYZGOxw
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": namespaces "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.ZZfx7SAauG + cat /tmp/tmp.85hoYZGOxw Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from 
server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found
+ sleep 4
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 -a -n 1 ']'
+ cat /tmp/tmp.ZZfx7SAauG
+ cat /tmp/tmp.85hoYZGOxw
+ sleep 8
+ cat /tmp/tmp.ZZfx7SAauG
+ cat /tmp/tmp.85hoYZGOxw
+ rm /tmp/tmp.ZZfx7SAauG /tmp/tmp.85hoYZGOxw
+ return 1
+ true
"cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + rm /tmp/tmp.ZZfx7SAauG /tmp/tmp.85hoYZGOxw + return 1 + true + '[' -n '' ']' + '[' -n psmdb-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace pitr-13298 + rm -rf /tmp/tmp.uUgrEfFz0e + kubectl_bin delete --grace-period=0 --force=true namespace psmdb-operator ++ mktemp ++ mktemp + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.hKhuDBlqKh + local LAST_OUT=/tmp/tmp.IiGuVqAMor ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.xXMohMNhUC + local exit_status=0 + local timeout=4 + local LAST_ERR=/tmp/tmp.v8c4ZmVrVa + local exit_status=0 + local timeout=4 ++ seq 0 2 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete --grace-period=0 --force=true namespace psmdb-operator + for i in $(seq 0 2) + set +e + kubectl delete --grace-period=0 --force=true namespace pitr-13298