Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/logs/pitr.log Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1 + main + create_infra pitr-32420 + local ns=pitr-32420 + [[ 1 == 1 ]] + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.29ZR877xIR ++ mktemp + local LAST_ERR=/tmp/tmp.K5A1mv8SqV + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.29ZR877xIR customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.K5A1mv8SqV + rm /tmp/tmp.29ZR877xIR /tmp/tmp.K5A1mv8SqV + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/deploy/crd.yaml ++ grep -v '\-\-\-' grep: warning: stray \ before - grep: warning: stray \ before - + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n pitr-6094 backup-minio-0 --type=merge -p '{"metadata":{"finalizers":[]}}' perconaservermongodbbackup.psmdb.percona.com/backup-minio-0 patched + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n pitr-6094 backup-minio-1 --type=merge -p '{"metadata":{"finalizers":[]}}' perconaservermongodbbackup.psmdb.percona.com/backup-minio-1 patched + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n pitr-6094 backup-minio-2 --type=merge -p '{"metadata":{"finalizers":[]}}' perconaservermongodbbackup.psmdb.percona.com/backup-minio-2 patched + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.dfZ9dRihwz ++ mktemp + local LAST_ERR=/tmp/tmp.X2AQuIMkuU + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.dfZ9dRihwz customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com condition met + cat /tmp/tmp.X2AQuIMkuU + rm /tmp/tmp.dfZ9dRihwz /tmp/tmp.X2AQuIMkuU + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get 
perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.24v1LJwuN0 ++ mktemp + local LAST_ERR=/tmp/tmp.m55XIsbKt6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.24v1LJwuN0 + cat /tmp/tmp.m55XIsbKt6 + rm /tmp/tmp.24v1LJwuN0 /tmp/tmp.m55XIsbKt6 + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.FSsFogsimC ++ mktemp + local LAST_ERR=/tmp/tmp.KMnR8PunIa + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.FSsFogsimC + cat /tmp/tmp.KMnR8PunIa + rm /tmp/tmp.FSsFogsimC /tmp/tmp.KMnR8PunIa + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.kYAwsXJq6G ++ mktemp + local LAST_ERR=/tmp/tmp.8TEsETY6Sg + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.kYAwsXJq6G clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.8TEsETY6Sg + rm /tmp/tmp.kYAwsXJq6G /tmp/tmp.8TEsETY6Sg + return 0 + check_crd_for_deletion PR-2282-535b6b53 + local git_tag=PR-2282-535b6b53 ++ curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/PR-2282-535b6b53/deploy/crd.yaml ++ yq eval .metadata.name ++ /usr/sbin/sed s/---//g ++ /usr/sbin/sed ':a;N;$!ba;s/\n/ /g' + for crd_name in $(curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml | yq eval '.metadata.name' | $sed 's/---//g' | $sed ':a;N;$!ba;s/\n/ /g') ++ kubectl_bin get crd/null -o 'jsonpath={.status.conditions[-1].type}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wM2Yhy4fjg +++ mktemp 
++ local LAST_ERR=/tmp/tmp.698e9Q0IZV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.wM2Yhy4fjg ++ cat /tmp/tmp.698e9Q0IZV Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 0 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.wM2Yhy4fjg ++ cat /tmp/tmp.698e9Q0IZV Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 4 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.wM2Yhy4fjg ++ cat /tmp/tmp.698e9Q0IZV Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 8 ++ cat /tmp/tmp.wM2Yhy4fjg ++ cat /tmp/tmp.698e9Q0IZV Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ rm /tmp/tmp.wM2Yhy4fjg /tmp/tmp.698e9Q0IZV ++ return 1 + [[ '' == \T\e\r\m\i\n\a\t\i\n\g ]] + '[' -n psmdb-operator ']' + create_namespace psmdb-operator + local namespace=psmdb-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + grep -E -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' ++ mktemp + '[' -n '' ']' + desc 'cleaned up old namespaces psmdb-operator' + set +o xtrace 
----------------------------------------------------------------------------------- cleaned up old namespaces psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace psmdb-operator --ignore-not-found + local LAST_OUT=/tmp/tmp.b7jhu1Cp8S ++ mktemp + xargs kubectl delete ns ++ mktemp + local LAST_OUT=/tmp/tmp.o8XSJJWvVt + local LAST_ERR=/tmp/tmp.3u8f5DTMHu + local exit_status=0 + local timeout=4 ++ seq 0 2 ++ mktemp + for i in $(seq 0 2) + set +e + kubectl get ns + local LAST_ERR=/tmp/tmp.x3BQFl3mfs + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete namespace psmdb-operator --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.b7jhu1Cp8S + cat /tmp/tmp.3u8f5DTMHu + rm /tmp/tmp.b7jhu1Cp8S /tmp/tmp.3u8f5DTMHu + return 0 namespace "pitr-6094" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.o8XSJJWvVt namespace "psmdb-operator" deleted + cat /tmp/tmp.x3BQFl3mfs + rm /tmp/tmp.o8XSJJWvVt /tmp/tmp.x3BQFl3mfs + return 0 + kubectl_bin wait --for=delete namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.RIJbVNV5TL ++ mktemp + local LAST_ERR=/tmp/tmp.wKmg8y5cVd + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.RIJbVNV5TL + cat /tmp/tmp.wKmg8y5cVd + rm /tmp/tmp.RIJbVNV5TL /tmp/tmp.wKmg8y5cVd + return 0 + desc 'create namespace psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.KWNXrhObOV ++ mktemp + local LAST_ERR=/tmp/tmp.O0unn8HtmT + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.KWNXrhObOV namespace/psmdb-operator created + cat /tmp/tmp.O0unn8HtmT + rm /tmp/tmp.KWNXrhObOV /tmp/tmp.O0unn8HtmT + return 0 + set_kube_ctx psmdb-operator + local namespace=psmdb-operator ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.8EnA3jhjjg +++ mktemp ++ local LAST_ERR=/tmp/tmp.6OiDTn33UY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8EnA3jhjjg ++ cat /tmp/tmp.6OiDTn33UY ++ rm /tmp/tmp.8EnA3jhjjg /tmp/tmp.6OiDTn33UY ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2282-535b6b53-1-cluster5 --namespace=psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.cgkDjbGmlK ++ mktemp + local LAST_ERR=/tmp/tmp.zXuNnQQOm6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2282-535b6b53-1-cluster5 --namespace=psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.cgkDjbGmlK Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-2282-535b6b53-1-cluster5" modified. 
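Note on the teardown just traced: delete_crd clears the finalizers on any leftover PSMDB custom resources before the CRDs themselves are removed, so the deletes cannot hang on finalizer cleanup. A minimal standalone sketch of that pattern follows; it assumes the repo's deploy/crd.yaml layout, the --timeout value is illustrative, and the 'grep -v -- ---' form replaces the escaped pattern that produced the "stray \ before -" warnings seen above:

    for crd_name in $(yq eval '.metadata.name' deploy/crd.yaml | grep -v -- '---'); do
        # strip finalizers from every remaining custom resource so the CRD delete below cannot block
        kubectl get "${crd_name}" --all-namespaces -o wide \
            | grep -v NAMESPACE \
            | xargs -L 1 sh -xc "kubectl patch ${crd_name} -n \$0 \$1 --type=merge -p '{\"metadata\":{\"finalizers\":[]}}'" \
            || true
        kubectl wait --for=delete crd "${crd_name}" --timeout=60s || true
    done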
+ cat /tmp/tmp.zXuNnQQOm6 + rm /tmp/tmp.cgkDjbGmlK /tmp/tmp.zXuNnQQOm6 + return 0 + deploy_operator + desc 'start PSMDB operator: docker.io/perconalab/percona-server-mongodb-operator:PR-2282-535b6b53' + set +o xtrace ----------------------------------------------------------------------------------- start PSMDB operator: docker.io/perconalab/percona-server-mongodb-operator:PR-2282-535b6b53 ----------------------------------------------------------------------------------- + local cr_file + '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/conf/crd.yaml ']' + cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/deploy/crd.yaml + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.4eksa3B0sh ++ mktemp + local LAST_ERR=/tmp/tmp.NAGD0lV6wM + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.4eksa3B0sh customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied + cat /tmp/tmp.NAGD0lV6wM + rm /tmp/tmp.4eksa3B0sh /tmp/tmp.NAGD0lV6wM + return 0 + '[' -n psmdb-operator ']' + apply_rbac cw-rbac + local operator_namespace=psmdb-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: psmdb-operator^' + kubectl_bin apply -n psmdb-operator -f - ++ mktemp + local LAST_OUT=/tmp/tmp.3Ic2WmqGao ++ mktemp + local LAST_ERR=/tmp/tmp.Es89FqzERW + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.3Ic2WmqGao clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created + cat /tmp/tmp.Es89FqzERW + rm /tmp/tmp.3Ic2WmqGao /tmp/tmp.Es89FqzERW + return 0 + yq eval ' (.spec.template.spec.containers[].image = "docker.io/perconalab/percona-server-mongodb-operator:PR-2282-535b6b53") | ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") | ((.. 
| select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/deploy/cw-operator.yaml + kubectl_bin apply -n psmdb-operator -f - ++ mktemp + local LAST_OUT=/tmp/tmp.GEz1Hy8oKX ++ mktemp + local LAST_ERR=/tmp/tmp.7sNoIEalwV + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.GEz1Hy8oKX deployment.apps/percona-server-mongodb-operator created + cat /tmp/tmp.7sNoIEalwV + rm /tmp/tmp.GEz1Hy8oKX /tmp/tmp.7sNoIEalwV + return 0 + sleep 20 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.VlXKY034O1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.VSQTuDUQqf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.VlXKY034O1 ++ cat /tmp/tmp.VSQTuDUQqf ++ rm /tmp/tmp.VlXKY034O1 /tmp/tmp.VSQTuDUQqf ++ return 0 + wait_operator_pod percona-server-mongodb-operator-667884bf6b-gkn8d + local pod=percona-server-mongodb-operator-667884bf6b-gkn8d + set +o xtrace waiting for pod/percona-server-mongodb-operator-667884bf6b-gkn8d to be ready.OK + echo 'Print operator info from log' Print operator info from log + grep 'Manager starting up' ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.cc80HOeqa9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.guO6jTtYic ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.cc80HOeqa9 ++ cat /tmp/tmp.guO6jTtYic ++ rm /tmp/tmp.cc80HOeqa9 /tmp/tmp.guO6jTtYic ++ return 0 + kubectl_bin logs -n psmdb-operator percona-server-mongodb-operator-667884bf6b-gkn8d ++ mktemp + local LAST_OUT=/tmp/tmp.CuaSP3NtcN ++ mktemp + local LAST_ERR=/tmp/tmp.5SFJoSLGaA + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl logs -n psmdb-operator percona-server-mongodb-operator-667884bf6b-gkn8d + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.CuaSP3NtcN + cat /tmp/tmp.5SFJoSLGaA + rm /tmp/tmp.CuaSP3NtcN /tmp/tmp.5SFJoSLGaA + return 0 2026-03-10T22:11:11.035Z INFO setup Manager starting up {"gitCommit": "535b6b53bbcb570a58886a695a8db35ef8fa502d", "gitBranch": "PR-2282-535b6b53", "buildTime": "", "goVersion": "go1.25.8", "os": "linux", "arch": "amd64"} + create_namespace pitr-32420 + local namespace=pitr-32420 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ grep chaos-mesh ++ kubectl get MutatingWebhookConfiguration ++ awk '{print $1}' + timeout 
30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + grep -E -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME' + '[' -n '' ']' + desc 'cleaned up old namespaces pitr-32420' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pitr-32420 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pitr-32420 --ignore-not-found + awk '{print$1}' ++ mktemp + xargs kubectl delete ns ++ mktemp + local LAST_OUT=/tmp/tmp.dEZE4YgXW1 ++ mktemp + local LAST_OUT=/tmp/tmp.XCfh8Cuyn0 ++ mktemp + local LAST_ERR=/tmp/tmp.oiibVc6crL + local exit_status=0 + local timeout=4 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.m4Dw3UylmC + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get ns + for i in $(seq 0 2) + set +e + kubectl delete namespace pitr-32420 --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.dEZE4YgXW1 + cat /tmp/tmp.oiibVc6crL + rm /tmp/tmp.dEZE4YgXW1 /tmp/tmp.oiibVc6crL + return 0 error: resource(s) were provided, but no name was specified + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.XCfh8Cuyn0 + cat /tmp/tmp.m4Dw3UylmC + rm /tmp/tmp.XCfh8Cuyn0 /tmp/tmp.m4Dw3UylmC + return 0 + kubectl_bin wait --for=delete namespace pitr-32420 ++ mktemp + local LAST_OUT=/tmp/tmp.ChJyFnmeI2 ++ mktemp + local LAST_ERR=/tmp/tmp.0U0M4NzjGE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete namespace pitr-32420 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ChJyFnmeI2 + cat /tmp/tmp.0U0M4NzjGE + rm /tmp/tmp.ChJyFnmeI2 /tmp/tmp.0U0M4NzjGE + return 0 + desc 'create namespace pitr-32420' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pitr-32420 ----------------------------------------------------------------------------------- + kubectl_bin create namespace pitr-32420 ++ mktemp + local LAST_OUT=/tmp/tmp.c55cqVBmcx ++ mktemp + local LAST_ERR=/tmp/tmp.iRSREOHxdP + local exit_status=0 + local timeout=4 ++ seq 
0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace pitr-32420 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.c55cqVBmcx namespace/pitr-32420 created + cat /tmp/tmp.iRSREOHxdP + rm /tmp/tmp.c55cqVBmcx /tmp/tmp.iRSREOHxdP + return 0 + set_kube_ctx pitr-32420 + local namespace=pitr-32420 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.ePSKq63L4g +++ mktemp ++ local LAST_ERR=/tmp/tmp.EvKd8yXYEm ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ePSKq63L4g ++ cat /tmp/tmp.EvKd8yXYEm ++ rm /tmp/tmp.ePSKq63L4g /tmp/tmp.EvKd8yXYEm ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2282-535b6b53-1-cluster5 --namespace=pitr-32420 ++ mktemp + local LAST_OUT=/tmp/tmp.5bdPNgV6B9 ++ mktemp + local LAST_ERR=/tmp/tmp.nROYVaOoMV + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2282-535b6b53-1-cluster5 --namespace=pitr-32420 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.5bdPNgV6B9 Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-2282-535b6b53-1-cluster5" modified. + cat /tmp/tmp.nROYVaOoMV + rm /tmp/tmp.5bdPNgV6B9 /tmp/tmp.nROYVaOoMV + return 0 + deploy_minio + local cert_secret= + local service_name=minio-service + desc 'install MinIO: minio-service' + set +o xtrace ----------------------------------------------------------------------------------- install MinIO: minio-service ----------------------------------------------------------------------------------- + helm uninstall minio-service + : + helm repo remove minio "minio" has been removed from your repositories + helm repo add minio https://charts.min.io/ "minio" has been added to your repositories + local endpoint=http://minio-service:9000 + minio_args=('--version' '5.4.0' '--set' 'replicas=1' '--set' 'mode=standalone' '--set' 'resources.requests.memory=256Mi' '--set' 'rootUser=rootuser' '--set' 'rootPassword=rootpass123' '--set' 'users[0].accessKey=some-access-key' '--set' 'users[0].secretKey=some-secret-key' '--set' 'users[0].policy=consoleAdmin' '--set' 'service.type=ClusterIP' '--set' 'configPathmc=/tmp/' '--set' 'securityContext.enabled=false' '--set' 'persistence.size=2G' '--set' 'fullnameOverride=minio-service' '--set' 'serviceAccount.create=true' '--set' 'serviceAccount.name=minio-service-sa') + local minio_args + [[ -n '' ]] + retry 10 60 helm install minio-service --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/ --set securityContext.enabled=false --set persistence.size=2G --set fullnameOverride=minio-service --set serviceAccount.create=true --set serviceAccount.name=minio-service-sa minio/minio + local max=10 + local delay=60 + shift 2 + local n=1 + helm install minio-service --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set 
configPathmc=/tmp/ --set securityContext.enabled=false --set persistence.size=2G --set fullnameOverride=minio-service --set serviceAccount.create=true --set serviceAccount.name=minio-service-sa minio/minio NAME: minio-service LAST DEPLOYED: Tue Mar 10 22:11:46 2026 NAMESPACE: pitr-32420 STATUS: deployed REVISION: 1 TEST SUITE: None NOTES: MinIO can be accessed via port 9000 on the following DNS name from within your cluster: minio-service.pitr-32420.cluster.local To access MinIO from localhost, run the below commands: 1. export POD_NAME=$(kubectl get pods --namespace pitr-32420 -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}") 2. kubectl port-forward $POD_NAME 9000 --namespace pitr-32420 Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/ You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client: 1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart 2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace pitr-32420 minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace pitr-32420 minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000 3. mc ls minio-service-local ++ kubectl_bin get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.97L3I89HHn +++ mktemp ++ local LAST_ERR=/tmp/tmp.pHW0TU4b6d ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.97L3I89HHn ++ cat /tmp/tmp.pHW0TU4b6d ++ rm /tmp/tmp.97L3I89HHn /tmp/tmp.pHW0TU4b6d ++ return 0 + local MINIO_POD=minio-service-6d5f646cdc-rm8mw + wait_pod minio-service-6d5f646cdc-rm8mw + local pod=minio-service-6d5f646cdc-rm8mw + set +o xtrace waiting for pod/minio-service-6d5f646cdc-rm8mw to be ready.OK + '[' -n psmdb-operator ']' + kubectl_bin create svc -n psmdb-operator externalname minio-service --external-name=minio-service.pitr-32420.svc.cluster.local --tcp=9000 service/minio-service created + create_minio_bucket operator-testing http://minio-service:9000 + local bucket=operator-testing + local endpoint=http://minio-service:9000 + kubectl_bin run -i --rm aws-cli --image=docker.io/perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --no-verify-ssl --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing' ++ mktemp + local LAST_OUT=/tmp/tmp.naRzQtdGsY ++ mktemp + local LAST_ERR=/tmp/tmp.7SemkE2ZQZ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl run -i --rm aws-cli --image=docker.io/perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --no-verify-ssl --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.naRzQtdGsY pod "aws-cli" deleted from pitr-32420 namespace + cat /tmp/tmp.7SemkE2ZQZ All commands and output from this session will be recorded in container logs, including credentials and sensitive information passed through the command prompt. 
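The MinIO bootstrap traced here is self-contained: a standalone helm release plus a one-shot AWS CLI pod that creates the test bucket against the in-cluster endpoint. A condensed sketch with only the essential values from the trace (the remaining --set flags shown in the log are omitted for brevity):

    helm repo add minio https://charts.min.io/
    helm install minio-service minio/minio --version 5.4.0 \
        --set mode=standalone --set replicas=1 --set service.type=ClusterIP \
        --set rootUser=rootuser --set rootPassword=rootpass123 \
        --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' \
        --set 'users[0].policy=consoleAdmin' --set persistence.size=2G \
        --set fullnameOverride=minio-service
    # one-shot pod that creates the bucket the backups will be written to
    kubectl run -i --rm aws-cli --image=docker.io/perconalab/awscli --restart=Never -- \
        bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --no-verify-ssl --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing'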
If you don't see a command prompt, try pressing enter. + rm /tmp/tmp.naRzQtdGsY /tmp/tmp.7SemkE2ZQZ + return 0 + desc 'create secrets and start client' + set +o xtrace ----------------------------------------------------------------------------------- create secrets and start client ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/conf/client.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/conf/minio-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.hPv3B8xsHC ++ mktemp + local LAST_ERR=/tmp/tmp.n5UzRKyMrc + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/conf/client.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/conf/minio-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.hPv3B8xsHC secret/some-users created deployment.apps/psmdb-client created secret/minio-secret created + cat /tmp/tmp.n5UzRKyMrc + rm /tmp/tmp.hPv3B8xsHC /tmp/tmp.n5UzRKyMrc + return 0 + cluster=some-name-rs0 + desc 'create first PSMDB cluster some-name-rs0' + set +o xtrace ----------------------------------------------------------------------------------- create first PSMDB cluster some-name-rs0 ----------------------------------------------------------------------------------- + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/conf/some-name-rs0.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/conf/some-name-rs0.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/conf/some-name-rs0.yml + yq eval '(.spec | select(.image == null)).image = "docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0"' ++ mktemp + yq eval '(.spec | select(has("pmm"))).pmm.image = "docker.io/percona/pmm-client:2.44.1-1"' + yq eval '(.spec | select(has("initImage"))).initImage = "docker.io/perconalab/percona-server-mongodb-operator:PR-2282-535b6b53"' + yq eval '(.spec | select(has("backup"))).backup.image = "docker.io/perconalab/percona-server-mongodb-operator:main-backup"' + yq eval '.spec.upgradeOptions.apply="Never"' + /usr/sbin/sed -e s/NAME_SPACE/pitr-32420/g + local LAST_OUT=/tmp/tmp.numWnJPEGl ++ mktemp + local LAST_ERR=/tmp/tmp.M9YUagl5oZ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.numWnJPEGl perconaservermongodb.psmdb.percona.com/some-name created + cat /tmp/tmp.M9YUagl5oZ + rm /tmp/tmp.numWnJPEGl /tmp/tmp.M9YUagl5oZ + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o 
xtrace waiting for pod/some-name-rs0-0 to be ready.....OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.............OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ch3tnyIxy8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.LPahK5HBJz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Ch3tnyIxy8 ++ cat /tmp/tmp.LPahK5HBJz ++ rm /tmp/tmp.Ch3tnyIxy8 /tmp/tmp.LPahK5HBJz ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready......OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bhUUF5qaWL +++ mktemp ++ local LAST_ERR=/tmp/tmp.x41RW245ZL ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bhUUF5qaWL ++ cat /tmp/tmp.x41RW245ZL ++ rm /tmp/tmp.bhUUF5qaWL /tmp/tmp.x41RW245ZL ++ return 0 + [[ '' == \t\r\u\e ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J31WQc65Nq +++ mktemp ++ local LAST_ERR=/tmp/tmp.bxjbGTuVRh ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.J31WQc65Nq ++ cat /tmp/tmp.bxjbGTuVRh ++ rm /tmp/tmp.J31WQc65Nq /tmp/tmp.bxjbGTuVRh ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness...................... + desc 'check if statefulset created with expected config' + set +o xtrace ----------------------------------------------------------------------------------- check if statefulset created with expected config ----------------------------------------------------------------------------------- + compare_kubectl statefulset/some-name-rs0 + local resource=statefulset/some-name-rs0 + local postfix= + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/statefulset_some-name-rs0.yml + local new_result=/tmp/tmp.oRDgtyLOf2/statefulset_some-name-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/statefulset_some-name-rs0-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-rs0 + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. 
| select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("pitr-32420", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.GGMhgZCM74 ++ mktemp + local LAST_ERR=/tmp/tmp.aj0Uu9Zw9t + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/some-name-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.GGMhgZCM74 + cat /tmp/tmp.aj0Uu9Zw9t + rm /tmp/tmp.GGMhgZCM74 /tmp/tmp.aj0Uu9Zw9t + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.oRDgtyLOf2/statefulset_some-name-rs0.yml + version_gt 1.22 ++ echo '1.32 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.oRDgtyLOf2/statefulset_some-name-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.oRDgtyLOf2/statefulset_some-name-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/statefulset_some-name-rs0.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/statefulset_some-name-rs0.yml /tmp/tmp.oRDgtyLOf2/statefulset_some-name-rs0.yml + log 'compare_kubectl: statefulset/some-name-rs0 OK' + set +o xtrace [2026-03-10T22:14:43+0000] compare_kubectl: statefulset/some-name-rs0 OK + write_initial_data + desc 'create user myApp' + set +o xtrace ----------------------------------------------------------------------------------- create user myApp ----------------------------------------------------------------------------------- + run_mongo 'db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' userAdmin:userAdmin123456@some-name-rs0.pitr-32420 + local 'command=db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' + local uri=userAdmin:userAdmin123456@some-name-rs0.pitr-32420 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ userAdmin:userAdmin123456@some-name-rs0.pitr-32420 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NuX9vCHOZm +++ mktemp ++ local LAST_ERR=/tmp/tmp.XSznfJjS9l ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.NuX9vCHOZm ++ cat /tmp/tmp.XSznfJjS9l ++ rm /tmp/tmp.NuX9vCHOZm /tmp/tmp.XSznfJjS9l ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.q0w37GqjrY ++ mktemp + local LAST_ERR=/tmp/tmp.jhbPH2ZQ4w + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.q0w37GqjrY Percona Server for MongoDB shell version v4.4.29-28 connecting to: 
mongodb://some-name-rs0-0.some-name-rs0.pitr-32420.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.pitr-32420.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-32420.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("486fbc69-40b1-4f39-b153-b25baab1093a") } Percona Server for MongoDB server version: v8.0.19-7 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] } bye + cat /tmp/tmp.jhbPH2ZQ4w + rm /tmp/tmp.q0w37GqjrY /tmp/tmp.jhbPH2ZQ4w + return 0 + sleep 2 + write_document + local cmp_postfix= + local cluster_name=some-name-rs0 + desc 'write initial data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write initial data, read from all ----------------------------------------------------------------------------------- + run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@some-name-rs0.pitr-32420 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@some-name-rs0.pitr-32420 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.pitr-32420 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YGSpkxXgNi +++ mktemp ++ local LAST_ERR=/tmp/tmp.fqZG8blV34 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.YGSpkxXgNi ++ cat /tmp/tmp.fqZG8blV34 ++ rm /tmp/tmp.YGSpkxXgNi /tmp/tmp.fqZG8blV34 ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.qFc0u1s44q ++ mktemp + local LAST_ERR=/tmp/tmp.iKX8BFns1k + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.qFc0u1s44q Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.pitr-32420.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.pitr-32420.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.pitr-32420.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("eb9244ed-34eb-4afa-9241-43d805f295cb") } Percona Server for MongoDB server version: v8.0.19-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.iKX8BFns1k + rm /tmp/tmp.qFc0u1s44q /tmp/tmp.iKX8BFns1k + return 0 + minikube_sleep + sleep_time=10 + [[ '' == 1 ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 '' + local command=find + local 
uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:14:51+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.c2KfXPvIWP +++ mktemp ++ local LAST_ERR=/tmp/tmp.wPhCT4Dl27 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.c2KfXPvIWP ++ cat /tmp/tmp.wPhCT4Dl27 ++ rm /tmp/tmp.c2KfXPvIWP /tmp/tmp.wPhCT4Dl27 ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.kK1DCv2V4b ++ mktemp + local LAST_ERR=/tmp/tmp.G66r9dNtoB + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.kK1DCv2V4b + cat /tmp/tmp.G66r9dNtoB + rm /tmp/tmp.kK1DCv2V4b /tmp/tmp.G66r9dNtoB + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find.json /tmp/tmp.oRDgtyLOf2/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 '' + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:14:54+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 mongodb '' '' + local 'command=use myApp\n db.test.find()' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error 
saving history file:|Started a new thread for the timer service' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mLXTauuouv +++ mktemp ++ local LAST_ERR=/tmp/tmp.mkb1D4AMvu ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.mLXTauuouv ++ cat /tmp/tmp.mkb1D4AMvu ++ rm /tmp/tmp.mLXTauuouv /tmp/tmp.mkb1D4AMvu ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.frA6g2ki9k ++ mktemp + local LAST_ERR=/tmp/tmp.2YWy3BuBsj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.frA6g2ki9k + cat /tmp/tmp.2YWy3BuBsj + rm /tmp/tmp.frA6g2ki9k /tmp/tmp.2YWy3BuBsj + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find.json /tmp/tmp.oRDgtyLOf2/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 '' + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:14:57+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6iAh6sqMK9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.r8IVqOeXSf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e 
++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.6iAh6sqMK9 ++ cat /tmp/tmp.r8IVqOeXSf ++ rm /tmp/tmp.6iAh6sqMK9 /tmp/tmp.r8IVqOeXSf ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Ek6sWj7wEe ++ mktemp + local LAST_ERR=/tmp/tmp.ahhrTTK0UO + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Ek6sWj7wEe + cat /tmp/tmp.ahhrTTK0UO + rm /tmp/tmp.Ek6sWj7wEe /tmp/tmp.ahhrTTK0UO + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find.json /tmp/tmp.oRDgtyLOf2/find + wait_backup_agent some-name-rs0-0 + local agent_pod=some-name-rs0-0 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs0-0...2026-03-10T22:14:06.000+0000 I listening for the commands + wait_backup_agent some-name-rs0-1 + local agent_pod=some-name-rs0-1 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs0-1...2026-03-10T22:14:39.000+0000 I listening for the commands + wait_backup_agent some-name-rs0-2 + local agent_pod=some-name-rs0-2 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs0-2...2026-03-10T22:14:42.000+0000 I listening for the commands + backup_name_minio=backup-minio + run_mongo 'use myApp\n db.test2.insert({ x: 100501 })' myApp:myPass@some-name-rs0.pitr-32420 + local 'command=use myApp\n db.test2.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.pitr-32420 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.pitr-32420 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nJVFxY7G0P +++ mktemp ++ local LAST_ERR=/tmp/tmp.u43lyVUekC ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.nJVFxY7G0P ++ cat /tmp/tmp.u43lyVUekC ++ rm /tmp/tmp.nJVFxY7G0P /tmp/tmp.u43lyVUekC ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test2.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.jirKetBBtD ++ mktemp + local LAST_ERR=/tmp/tmp.SzbQzciN9I + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test2.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.jirKetBBtD Percona Server for MongoDB shell version v4.4.29-28 connecting to: 
mongodb://some-name-rs0-0.some-name-rs0.pitr-32420.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.pitr-32420.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-32420.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("d3a578b2-f74e-4a12-bfc7-4b73c1c53d1c") } Percona Server for MongoDB server version: v8.0.19-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.SzbQzciN9I + rm /tmp/tmp.jirKetBBtD /tmp/tmp.SzbQzciN9I + return 0 + run_backup backup-minio 0 + local name=backup-minio + local idx=0 + desc 'run backup backup-minio-0' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio-0 ----------------------------------------------------------------------------------- + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/conf/backup-minio.yml + /usr/sbin/sed -e 's/name:/name: backup-minio-0/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.sjtiZjzLLl ++ mktemp + local LAST_ERR=/tmp/tmp.bzVPENsWYD + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.sjtiZjzLLl perconaservermongodbbackup.psmdb.percona.com/backup-minio-0 created + cat /tmp/tmp.bzVPENsWYD + rm /tmp/tmp.sjtiZjzLLl /tmp/tmp.bzVPENsWYD + return 0 + wait_backup backup-minio-0 + local backup_name=backup-minio-0 + local target_state=ready + set +o xtrace waiting for backup-minio-0 to reach ready state.......OK + write_document -2nd + local cmp_postfix=-2nd + local cluster_name=some-name-rs0 + desc 'write initial data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write initial data, read from all ----------------------------------------------------------------------------------- + run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@some-name-rs0.pitr-32420 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@some-name-rs0.pitr-32420 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.pitr-32420 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UN8vs7mnCG +++ mktemp ++ local LAST_ERR=/tmp/tmp.oX4yfVJizJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.UN8vs7mnCG ++ cat /tmp/tmp.oX4yfVJizJ ++ rm /tmp/tmp.UN8vs7mnCG /tmp/tmp.oX4yfVJizJ ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.SsYzrGzDik ++ mktemp + local LAST_ERR=/tmp/tmp.BsfDtBk2OJ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo 
mongodb+srv://myApp:myPass@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.SsYzrGzDik Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.pitr-32420.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.pitr-32420.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.pitr-32420.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("e9c13006-e9ff-434d-bd9b-26167a0a82a6") } Percona Server for MongoDB server version: v8.0.19-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.BsfDtBk2OJ + rm /tmp/tmp.SsYzrGzDik /tmp/tmp.BsfDtBk2OJ + return 0 + minikube_sleep + sleep_time=10 + [[ '' == 1 ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:15:23+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kDD5jH3MzC +++ mktemp ++ local LAST_ERR=/tmp/tmp.CoZu8sIqu9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.kDD5jH3MzC ++ cat /tmp/tmp.CoZu8sIqu9 ++ rm /tmp/tmp.kDD5jH3MzC /tmp/tmp.CoZu8sIqu9 ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.jauuEnuJum ++ mktemp + local LAST_ERR=/tmp/tmp.zq5KTIHPWI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.jauuEnuJum + cat /tmp/tmp.zq5KTIHPWI + rm /tmp/tmp.jauuEnuJum /tmp/tmp.zq5KTIHPWI 
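backup-minio-0 above was created by templating e2e-tests/pitr/conf/backup-minio.yml and then waiting for the psmdb-backup object to reach the ready state. A simplified sketch of that step, using a hypothetical function name; the sed call is taken from the trace, and the polling loop is condensed into a single kubectl wait:

    # sketch of the run_backup/wait_backup sequence traced above; not verbatim
    run_backup_sketch() {
      local name="$1" idx="$2"
      sed -e "s/name:/name: ${name}-${idx}/" e2e-tests/pitr/conf/backup-minio.yml \
        | kubectl apply -f -
      kubectl wait psmdb-backup "${name}-${idx}" \
        --for=jsonpath='{.status.state}'=ready --timeout=600s
    }
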
+ return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find-2nd.json /tmp/tmp.oRDgtyLOf2/find-2nd + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:15:26+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.p8tDkeIEWR +++ mktemp ++ local LAST_ERR=/tmp/tmp.iD5MXSl6oE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.p8tDkeIEWR ++ cat /tmp/tmp.iD5MXSl6oE ++ rm /tmp/tmp.p8tDkeIEWR /tmp/tmp.iD5MXSl6oE ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.umLFmhaZ23 ++ mktemp + local LAST_ERR=/tmp/tmp.LB66TDApwl + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.umLFmhaZ23 + cat /tmp/tmp.LB66TDApwl + rm /tmp/tmp.umLFmhaZ23 /tmp/tmp.LB66TDApwl + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find-2nd.json /tmp/tmp.oRDgtyLOf2/find-2nd + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:15:28+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' 
myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9UMSqJh5xJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.iFH8Mvh4kz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.9UMSqJh5xJ ++ cat /tmp/tmp.iFH8Mvh4kz ++ rm /tmp/tmp.9UMSqJh5xJ /tmp/tmp.iFH8Mvh4kz ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.aPwzqERA4L ++ mktemp + local LAST_ERR=/tmp/tmp.xCSJvvwS0b + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.aPwzqERA4L + cat /tmp/tmp.xCSJvvwS0b + rm /tmp/tmp.aPwzqERA4L /tmp/tmp.xCSJvvwS0b + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find-2nd.json /tmp/tmp.oRDgtyLOf2/find-2nd + sleep 2 ++ run_mongo 'new Date().toISOString()' myApp:myPass@some-name-rs0.pitr-32420 mongodb '' --quiet ++ local 'command=new Date().toISOString()' ++ grep -E -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match' ++ local uri=myApp:myPass@some-name-rs0.pitr-32420 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ local replica_set=rs0 ++ [[ myApp:myPass@some-name-rs0.pitr-32420 == *cfg* ]] ++ cut -c1-19 +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp ++ tr T ' ' +++ local LAST_OUT=/tmp/tmp.EVeazwsUED ++++ mktemp +++ local LAST_ERR=/tmp/tmp.wwACUJioBR +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.EVeazwsUED +++ cat /tmp/tmp.wwACUJioBR +++ rm /tmp/tmp.EVeazwsUED /tmp/tmp.wwACUJioBR +++ return 0 ++ local client_container=psmdb-client-bb8b97679-jlqq8 ++ kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''new Date().toISOString()\n'\'' | mongo 
mongodb://myApp:myPass@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZKzp90GJcc +++ mktemp ++ local LAST_ERR=/tmp/tmp.DOuhewQug7 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''new Date().toISOString()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ZKzp90GJcc ++ cat /tmp/tmp.DOuhewQug7 ++ rm /tmp/tmp.ZKzp90GJcc /tmp/tmp.DOuhewQug7 ++ return 0 + time_now='2026-03-10 22:15:36' + check_recovery backup-minio-0 date '2026-03-10 22:15:36' -2nd '' some-name test2 + local backup_name=backup-minio-0 + local restore_type=date + local 'restore_date=2026-03-10 22:15:36' + local cmp_postfix=-2nd + local backupSource= + local cluster_name=some-name + local selective_collection=test2 + local restore_name=restore-backup-minio-0 + local restore_file=restore.yml + local cluster + cluster=some-name-rs0 + desc 'write more data before restore by date' + set +o xtrace ----------------------------------------------------------------------------------- write more data before restore by date ----------------------------------------------------------------------------------- + sleep 60 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.pitr-32420 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.pitr-32420 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.pitr-32420 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.STzuB6BbSi +++ mktemp ++ local LAST_ERR=/tmp/tmp.OJ91IVh9Sx ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.STzuB6BbSi ++ cat /tmp/tmp.OJ91IVh9Sx ++ rm /tmp/tmp.STzuB6BbSi /tmp/tmp.OJ91IVh9Sx ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.LS8sHdD6Es ++ mktemp + local LAST_ERR=/tmp/tmp.7lXVZoA03U + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LS8sHdD6Es Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.pitr-32420.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.pitr-32420.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-32420.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("fb0c83ad-1d68-43bf-aebb-b823ca61d56c") } Percona Server for MongoDB 
server version: v8.0.19-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.7lXVZoA03U + rm /tmp/tmp.LS8sHdD6Es /tmp/tmp.7lXVZoA03U + return 0 + '[' -n test2 ']' + run_mongo 'use myApp\n db.test2.drop()' myApp:myPass@some-name-rs0.pitr-32420 + local 'command=use myApp\n db.test2.drop()' + local uri=myApp:myPass@some-name-rs0.pitr-32420 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.pitr-32420 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.W8DlNFuPne +++ mktemp ++ local LAST_ERR=/tmp/tmp.0qREjdSHRg ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.W8DlNFuPne ++ cat /tmp/tmp.0qREjdSHRg ++ rm /tmp/tmp.W8DlNFuPne /tmp/tmp.0qREjdSHRg ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test2.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.e43uZbh5Yv ++ mktemp + local LAST_ERR=/tmp/tmp.yvlCyLIWzx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test2.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.e43uZbh5Yv Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.pitr-32420.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-32420.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.pitr-32420.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("16a78586-61aa-4b05-8ff9-a8924d6264d9") } Percona Server for MongoDB server version: v8.0.19-7 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.yvlCyLIWzx + rm /tmp/tmp.e43uZbh5Yv /tmp/tmp.yvlCyLIWzx + return 0 + desc 'waiting for chunks to be uploaded' + set +o xtrace ----------------------------------------------------------------------------------- waiting for chunks to be uploaded ----------------------------------------------------------------------------------- + sleep 120 + desc 'check restore by date' + set +o xtrace ----------------------------------------------------------------------------------- check restore by date ----------------------------------------------------------------------------------- + '[' -z '' ']' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-minio-0/' + /usr/sbin/sed -e 's/backupName:/backupName: backup-minio-0/' + /usr/sbin/sed -e 's/clusterName:/clusterName: some-name/' + '[' -z '2026-03-10 22:15:36' ']' + /usr/sbin/sed -e 's/date:/date: 2026-03-10 22:15:36/' + /usr/sbin/sed -e 's/pitrType:/type: date/' + /usr/sbin/sed -e /backupSource/,+2d + '[' -n test2 ']' + yq eval '.spec.selective = {"namespaces": ["myApp.test"], 
"withUsersAndRoles": true}' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.cyRQkXTjw8 ++ mktemp + local LAST_ERR=/tmp/tmp.WA4NqGa73C + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.cyRQkXTjw8 perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-0 created + cat /tmp/tmp.WA4NqGa73C + rm /tmp/tmp.cyRQkXTjw8 /tmp/tmp.WA4NqGa73C + return 0 + wait_restore backup-minio-0 some-name + local backup_name=backup-minio-0 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-0 object to be created.OK Waiting psmdb-restore/restore-backup-minio-0 to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3rYmhw4wDM +++ mktemp ++ local LAST_ERR=/tmp/tmp.aAIa2f08OV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3rYmhw4wDM ++ cat /tmp/tmp.aAIa2f08OV ++ rm /tmp/tmp.3rYmhw4wDM /tmp/tmp.aAIa2f08OV ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + echo + set -o xtrace + '[' -n test2 ']' ++ collection_exists test2 ./e2e-tests/pitr/run: line 108: collection_exists: command not found + [[ '' == \t\r\u\e ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:19:10+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sVmvfGpkXL +++ mktemp ++ local LAST_ERR=/tmp/tmp.ALm7dJfdOm ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.sVmvfGpkXL ++ cat /tmp/tmp.ALm7dJfdOm ++ rm /tmp/tmp.sVmvfGpkXL 
/tmp/tmp.ALm7dJfdOm ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.D2kXC288zY ++ mktemp + local LAST_ERR=/tmp/tmp.9Y49NaWWRf + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.D2kXC288zY + cat /tmp/tmp.9Y49NaWWRf + rm /tmp/tmp.D2kXC288zY /tmp/tmp.9Y49NaWWRf + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find-2nd.json /tmp/tmp.oRDgtyLOf2/find-2nd + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:19:12+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.61l4nkj0ea +++ mktemp ++ local LAST_ERR=/tmp/tmp.E0Te5vMsXO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.61l4nkj0ea ++ cat /tmp/tmp.E0Te5vMsXO ++ rm /tmp/tmp.61l4nkj0ea /tmp/tmp.E0Te5vMsXO ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.FazAb0cc10 ++ mktemp + local LAST_ERR=/tmp/tmp.TJ2S5mxpjR + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a 
-n 1 ']' + break + cat /tmp/tmp.FazAb0cc10 + cat /tmp/tmp.TJ2S5mxpjR + rm /tmp/tmp.FazAb0cc10 /tmp/tmp.TJ2S5mxpjR + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find-2nd.json /tmp/tmp.oRDgtyLOf2/find-2nd + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:19:15+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fFn3bFvD0Q +++ mktemp ++ local LAST_ERR=/tmp/tmp.0ZSOkIObss ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.fFn3bFvD0Q ++ cat /tmp/tmp.0ZSOkIObss ++ rm /tmp/tmp.fFn3bFvD0Q /tmp/tmp.0ZSOkIObss ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.wNKwSUPuKi ++ mktemp + local LAST_ERR=/tmp/tmp.E5kBOo4LaX + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.wNKwSUPuKi + cat /tmp/tmp.E5kBOo4LaX + rm /tmp/tmp.wNKwSUPuKi /tmp/tmp.E5kBOo4LaX + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find-2nd.json /tmp/tmp.oRDgtyLOf2/find-2nd + run_backup backup-minio 1 + local name=backup-minio + local idx=1 + desc 'run backup backup-minio-1' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio-1 ----------------------------------------------------------------------------------- + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/conf/backup-minio.yml + /usr/sbin/sed -e 's/name:/name: backup-minio-1/' + kubectl_bin apply -f - ++ 
mktemp + local LAST_OUT=/tmp/tmp.tfa2zB5XQO ++ mktemp + local LAST_ERR=/tmp/tmp.ZkKxIlx82M + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.tfa2zB5XQO perconaservermongodbbackup.psmdb.percona.com/backup-minio-1 created + cat /tmp/tmp.ZkKxIlx82M + rm /tmp/tmp.tfa2zB5XQO /tmp/tmp.ZkKxIlx82M + return 0 + wait_backup backup-minio-1 + local backup_name=backup-minio-1 + local target_state=ready + set +o xtrace waiting for backup-minio-1 to reach ready state.......OK + compare_latest_restorable_time some-name-rs0 backup-minio-1 + local cluster=some-name-rs0 + local backup_name=backup-minio-1 + local latest_restorable_time + local backup_time ++ get_latest_restorable_time some-name-rs0 ++ local cluster=some-name-rs0 ++ local first_timestamp ++ local second_timestamp ++ local retry=0 ++ [[ '' != '' ]] +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.J8pgmxOAck ++++ mktemp +++ local LAST_ERR=/tmp/tmp.ysCR5jUnhZ +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.J8pgmxOAck +++ cat /tmp/tmp.ysCR5jUnhZ +++ rm /tmp/tmp.J8pgmxOAck /tmp/tmp.ysCR5jUnhZ +++ return 0 ++ first_timestamp=1773181128 ++ sleep 5 ++ [[ 1773181128 != '' ]] ++ [[ 1773181128 != \n\u\l\l ]] +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.6GoXxO17OB ++++ mktemp +++ local LAST_ERR=/tmp/tmp.dJQ7zC7aH6 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.6GoXxO17OB +++ cat /tmp/tmp.dJQ7zC7aH6 +++ rm /tmp/tmp.6GoXxO17OB /tmp/tmp.dJQ7zC7aH6 +++ return 0 ++ second_timestamp=1773181128 ++ let retry+=1 ++ [[ 1 -gt 30 ]] ++ [[ 1773181128 != '' ]] ++ [[ 1773181128 != \n\u\l\l ]] ++ [[ 1773181128 == 1773181128 ]] ++ /usr/sbin/date -u -d @1773181128 +%Y-%m-%dT%H:%M:%SZ + latest_restorable_time=2026-03-10T22:18:48Z ++ get_latest_restorable_time_from_backup_object backup-minio-1 ++ local backup_name=backup-minio-1 ++ local latestRestorableTime ++ local retry=0 ++ [[ '' != '' ]] ++ sleep 5 +++ kubectl_bin get psmdb-backup backup-minio-1 -o 'jsonpath={.status.latestRestorableTime}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.VEpNjU19YO ++++ mktemp +++ local LAST_ERR=/tmp/tmp.gzTH5MazyV +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get psmdb-backup backup-minio-1 -o 'jsonpath={.status.latestRestorableTime}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.VEpNjU19YO +++ cat /tmp/tmp.gzTH5MazyV +++ rm /tmp/tmp.VEpNjU19YO /tmp/tmp.gzTH5MazyV +++ return 0 ++ latestRestorableTime=2026-03-10T22:18:48Z ++ let retry+=1 ++ [[ 1 -gt 30 ]] ++ [[ 2026-03-10T22:18:48Z != '' ]] ++ [[ 2026-03-10T22:18:48Z != \n\u\l\l ]] ++ echo 2026-03-10T22:18:48Z + backup_time=2026-03-10T22:18:48Z + [[ 2026-03-10T22:18:48Z != \2\0\2\6\-\0\3\-\1\0\T\2\2\:\1\8\:\4\8\Z ]] + check_recovery backup-minio-1 latest '' -3rd '' some-name + 
local backup_name=backup-minio-1 + local restore_type=latest + local restore_date= + local cmp_postfix=-3rd + local backupSource= + local cluster_name=some-name + local selective_collection= + local restore_name=restore-backup-minio-1 + local restore_file=restore.yml + local cluster + cluster=some-name-rs0 + desc 'write more data before restore by latest' + set +o xtrace ----------------------------------------------------------------------------------- write more data before restore by latest ----------------------------------------------------------------------------------- + sleep 60 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.pitr-32420 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.pitr-32420 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.pitr-32420 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.71UlxAccpJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.vxqaZTK4L6 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.71UlxAccpJ ++ cat /tmp/tmp.vxqaZTK4L6 ++ rm /tmp/tmp.71UlxAccpJ /tmp/tmp.vxqaZTK4L6 ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.P7kJapbnpe ++ mktemp + local LAST_ERR=/tmp/tmp.LL8WlBjSJx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.P7kJapbnpe Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.pitr-32420.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.pitr-32420.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.pitr-32420.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("2f0f1873-32c1-485f-a5cf-1edff0019e19") } Percona Server for MongoDB server version: v8.0.19-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.LL8WlBjSJx + rm /tmp/tmp.P7kJapbnpe /tmp/tmp.LL8WlBjSJx + return 0 + '[' -n '' ']' + desc 'waiting for chunks to be uploaded' + set +o xtrace ----------------------------------------------------------------------------------- waiting for chunks to be uploaded ----------------------------------------------------------------------------------- + sleep 120 + desc 'check restore by latest' + set +o xtrace ----------------------------------------------------------------------------------- check restore by latest ----------------------------------------------------------------------------------- + '[' -z '' ']' + cat 
/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-minio-1/' + /usr/sbin/sed -e 's/backupName:/backupName: backup-minio-1/' + /usr/sbin/sed -e 's/clusterName:/clusterName: some-name/' + /usr/sbin/sed -e 's/pitrType:/type: latest/' + '[' -z '' ']' + /usr/sbin/sed -e /date:/d + /usr/sbin/sed -e /backupSource/,+2d + '[' -n '' ']' + yq + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.mnk0X7CCW5 ++ mktemp + local LAST_ERR=/tmp/tmp.KyKGUNXkfY + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.mnk0X7CCW5 perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-1 created + cat /tmp/tmp.KyKGUNXkfY + rm /tmp/tmp.mnk0X7CCW5 /tmp/tmp.KyKGUNXkfY + return 0 + wait_restore backup-minio-1 some-name + local backup_name=backup-minio-1 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-1 object to be created.OK Waiting psmdb-restore/restore-backup-minio-1 to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.87RjCDiS0W +++ mktemp ++ local LAST_ERR=/tmp/tmp.JMVRsj1pG3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.87RjCDiS0W ++ cat /tmp/tmp.JMVRsj1pG3 ++ rm /tmp/tmp.87RjCDiS0W /tmp/tmp.JMVRsj1pG3 ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + echo + set -o xtrace + '[' -n '' ']' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 -3rd + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 + local postfix=-3rd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:23:18+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 + local driver=mongodb + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PN5lMLRFWc +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ez7zELYyjF ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for 
i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.PN5lMLRFWc ++ cat /tmp/tmp.Ez7zELYyjF ++ rm /tmp/tmp.PN5lMLRFWc /tmp/tmp.Ez7zELYyjF ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.GttuT3AgLS ++ mktemp + local LAST_ERR=/tmp/tmp.wVwGIoT7eT + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.GttuT3AgLS + cat /tmp/tmp.wVwGIoT7eT + rm /tmp/tmp.GttuT3AgLS /tmp/tmp.wVwGIoT7eT + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find-3rd.json /tmp/tmp.oRDgtyLOf2/find-3rd + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 -3rd + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 + local postfix=-3rd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:23:20+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e4sRaBEqIg +++ mktemp ++ local LAST_ERR=/tmp/tmp.2GT5oInQ9f ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.e4sRaBEqIg ++ cat /tmp/tmp.2GT5oInQ9f ++ rm /tmp/tmp.e4sRaBEqIg /tmp/tmp.2GT5oInQ9f ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.szNKzax4RI ++ mktemp + local LAST_ERR=/tmp/tmp.hKBa6lXbVi + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl 
exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.szNKzax4RI + cat /tmp/tmp.hKBa6lXbVi + rm /tmp/tmp.szNKzax4RI /tmp/tmp.hKBa6lXbVi + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find-3rd.json /tmp/tmp.oRDgtyLOf2/find-3rd + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 -3rd + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 + local postfix=-3rd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:23:23+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tVboNT27Wg +++ mktemp ++ local LAST_ERR=/tmp/tmp.AKA7CNaV1Q ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.tVboNT27Wg ++ cat /tmp/tmp.AKA7CNaV1Q ++ rm /tmp/tmp.tVboNT27Wg /tmp/tmp.AKA7CNaV1Q ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.lITDIoipaC ++ mktemp + local LAST_ERR=/tmp/tmp.e0SPrJU7Ai + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.lITDIoipaC + cat /tmp/tmp.e0SPrJU7Ai + rm /tmp/tmp.lITDIoipaC /tmp/tmp.e0SPrJU7Ai + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find-3rd.json /tmp/tmp.oRDgtyLOf2/find-3rd + desc 'restore pitr using backupSource' + set +o xtrace ----------------------------------------------------------------------------------- restore pitr using backupSource 
----------------------------------------------------------------------------------- + reset_collection + desc 'reset data' + set +o xtrace ----------------------------------------------------------------------------------- reset data ----------------------------------------------------------------------------------- + run_mongo 'use myApp\n db.test.remove({})' myApp:myPass@some-name-rs0.pitr-32420 + local 'command=use myApp\n db.test.remove({})' + local uri=myApp:myPass@some-name-rs0.pitr-32420 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.pitr-32420 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OuRwuJrDAJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.YGWQ2rqIXN ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.OuRwuJrDAJ ++ cat /tmp/tmp.YGWQ2rqIXN ++ rm /tmp/tmp.OuRwuJrDAJ /tmp/tmp.YGWQ2rqIXN ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.remove({})\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.XY8djpE6ik ++ mktemp + local LAST_ERR=/tmp/tmp.zhKJR7UDxT + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.remove({})\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.XY8djpE6ik Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.pitr-32420.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.pitr-32420.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.pitr-32420.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("4a588bfa-c6b5-49e0-82cf-4948e3beaddc") } Percona Server for MongoDB server version: v8.0.19-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nRemoved" : 3 }) bye + cat /tmp/tmp.zhKJR7UDxT + rm /tmp/tmp.XY8djpE6ik /tmp/tmp.zhKJR7UDxT + return 0 + sleep 2 + write_document + local cmp_postfix= + local cluster_name=some-name-rs0 + desc 'write initial data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write initial data, read from all ----------------------------------------------------------------------------------- + run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@some-name-rs0.pitr-32420 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@some-name-rs0.pitr-32420 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.pitr-32420 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3oJuN0GIOV +++ mktemp ++ local LAST_ERR=/tmp/tmp.g0C7vccrQE ++ local exit_status=0 ++ local 
timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3oJuN0GIOV ++ cat /tmp/tmp.g0C7vccrQE ++ rm /tmp/tmp.3oJuN0GIOV /tmp/tmp.g0C7vccrQE ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.NB9x0XWoNm ++ mktemp + local LAST_ERR=/tmp/tmp.UnwskT6b8n + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.NB9x0XWoNm Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.pitr-32420.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.pitr-32420.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-32420.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("fd34ee6e-ab85-4ad0-9751-d93570a2d13f") } Percona Server for MongoDB server version: v8.0.19-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.UnwskT6b8n + rm /tmp/tmp.NB9x0XWoNm /tmp/tmp.UnwskT6b8n + return 0 + minikube_sleep + sleep_time=10 + [[ '' == 1 ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 '' + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:23:34+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DbUBmujNF8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.UVAMmdjCVp ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.DbUBmujNF8 ++ cat /tmp/tmp.UVAMmdjCVp 
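The compare_latest_restorable_time steps in this log (after backup-minio-1 above and backup-minio-2 below) cross-check PBM's last PITR chunk against the backup object's status field. A condensed sketch with a hypothetical helper name; the jq and jsonpath expressions are copied from the trace, and the stabilization/retry loop is omitted:

    # condensed sketch; the real helper re-reads pbm status until the value stabilizes
    latest_restorable_sketch() {
      local pod="$1" backup="$2"
      local ts from_pbm from_cr
      ts=$(kubectl exec "$pod" -c backup-agent -- pbm status -o json \
        | jq -r '.backups.pitrChunks.pitrChunks | last | .range.end')
      from_pbm=$(date -u -d "@${ts}" +%Y-%m-%dT%H:%M:%SZ)
      from_cr=$(kubectl get psmdb-backup "$backup" -o jsonpath='{.status.latestRestorableTime}')
      [[ "$from_pbm" == "$from_cr" ]] || echo "latestRestorableTime mismatch: ${from_pbm} vs ${from_cr}"
    }
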
++ rm /tmp/tmp.DbUBmujNF8 /tmp/tmp.UVAMmdjCVp ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.df9Szi36pk ++ mktemp + local LAST_ERR=/tmp/tmp.FXYqX3ajWO + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.df9Szi36pk + cat /tmp/tmp.FXYqX3ajWO + rm /tmp/tmp.df9Szi36pk /tmp/tmp.FXYqX3ajWO + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find.json /tmp/tmp.oRDgtyLOf2/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 '' + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:23:36+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8WGdR50l3x +++ mktemp ++ local LAST_ERR=/tmp/tmp.rdq3P854fM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8WGdR50l3x ++ cat /tmp/tmp.rdq3P854fM ++ rm /tmp/tmp.8WGdR50l3x /tmp/tmp.rdq3P854fM ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.sfrN5N911U ++ mktemp + local LAST_ERR=/tmp/tmp.TZHxuLLvqg + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 
0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.sfrN5N911U + cat /tmp/tmp.TZHxuLLvqg + rm /tmp/tmp.sfrN5N911U /tmp/tmp.TZHxuLLvqg + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find.json /tmp/tmp.oRDgtyLOf2/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 '' + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:23:38+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0CSdpiUgDR +++ mktemp ++ local LAST_ERR=/tmp/tmp.N5sKzInTIe ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.0CSdpiUgDR ++ cat /tmp/tmp.N5sKzInTIe ++ rm /tmp/tmp.0CSdpiUgDR /tmp/tmp.N5sKzInTIe ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Y2hFhuGgvi ++ mktemp + local LAST_ERR=/tmp/tmp.V7Qub5NCfi + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Y2hFhuGgvi + cat /tmp/tmp.V7Qub5NCfi + rm /tmp/tmp.Y2hFhuGgvi /tmp/tmp.V7Qub5NCfi + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find.json /tmp/tmp.oRDgtyLOf2/find + run_backup backup-minio 2 + local name=backup-minio + local idx=2 + desc 'run backup backup-minio-2' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio-2 ----------------------------------------------------------------------------------- + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/conf/backup-minio.yml + /usr/sbin/sed -e 's/name:/name: backup-minio-2/' + kubectl_bin apply -f - ++ mktemp + 
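
Each compare_mongo_cmd call above follows the same pattern: run db.test.find() through the client pod, strip shell and driver chatter with the grep filter, mask volatile tokens (ObjectIds and the pod ordinal in the hostname) with sed, and diff the result against a golden file. Isolated into a few lines, with the pod name, URI and substitutions taken from this run (the grep pattern list is abridged), the check looks roughly like this:

kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c \
  'printf "use myApp\n db.test.find()\n" | mongo "mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false&replicaSet=rs0"' \
  | grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Percona Server for MongoDB|connecting to:|Implicit session:|versions do not match' \
  | sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' \
  > /tmp/find
# Any delta against the expected document set fails the step.
diff -u e2e-tests/pitr/compare/find.json /tmp/find

Masking the pod ordinal is what lets the same golden file be reused for rs0-0, rs0-1 and rs0-2.
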
local LAST_OUT=/tmp/tmp.2hOncCU6Ep ++ mktemp + local LAST_ERR=/tmp/tmp.3IsqUXMd1K + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.2hOncCU6Ep perconaservermongodbbackup.psmdb.percona.com/backup-minio-2 created + cat /tmp/tmp.3IsqUXMd1K + rm /tmp/tmp.2hOncCU6Ep /tmp/tmp.3IsqUXMd1K + return 0 + wait_backup backup-minio-2 + local backup_name=backup-minio-2 + local target_state=ready + set +o xtrace waiting for backup-minio-2 to reach ready state.......OK + compare_latest_restorable_time some-name-rs0 backup-minio-2 + local cluster=some-name-rs0 + local backup_name=backup-minio-2 + local latest_restorable_time + local backup_time ++ get_latest_restorable_time some-name-rs0 ++ local cluster=some-name-rs0 ++ local first_timestamp ++ local second_timestamp ++ local retry=0 ++ [[ '' != '' ]] +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.rQ3x9G5bdg ++++ mktemp +++ local LAST_ERR=/tmp/tmp.BK6ro3KRPW +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.rQ3x9G5bdg +++ cat /tmp/tmp.BK6ro3KRPW +++ rm /tmp/tmp.rQ3x9G5bdg /tmp/tmp.BK6ro3KRPW +++ return 0 ++ first_timestamp=1773181378 ++ sleep 5 ++ [[ 1773181378 != '' ]] ++ [[ 1773181378 != \n\u\l\l ]] +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.W2wnDch1uI ++++ mktemp +++ local LAST_ERR=/tmp/tmp.oNsqunQs1d +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.W2wnDch1uI +++ cat /tmp/tmp.oNsqunQs1d +++ rm /tmp/tmp.W2wnDch1uI /tmp/tmp.oNsqunQs1d +++ return 0 ++ second_timestamp=1773181378 ++ let retry+=1 ++ [[ 1 -gt 30 ]] ++ [[ 1773181378 != '' ]] ++ [[ 1773181378 != \n\u\l\l ]] ++ [[ 1773181378 == 1773181378 ]] ++ /usr/sbin/date -u -d @1773181378 +%Y-%m-%dT%H:%M:%SZ + latest_restorable_time=2026-03-10T22:22:58Z ++ get_latest_restorable_time_from_backup_object backup-minio-2 ++ local backup_name=backup-minio-2 ++ local latestRestorableTime ++ local retry=0 ++ [[ '' != '' ]] ++ sleep 5 +++ kubectl_bin get psmdb-backup backup-minio-2 -o 'jsonpath={.status.latestRestorableTime}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.sU1rSetKWj ++++ mktemp +++ local LAST_ERR=/tmp/tmp.46leIccIHn +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get psmdb-backup backup-minio-2 -o 'jsonpath={.status.latestRestorableTime}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.sU1rSetKWj +++ cat /tmp/tmp.46leIccIHn +++ rm /tmp/tmp.sU1rSetKWj /tmp/tmp.46leIccIHn +++ return 0 ++ latestRestorableTime=2026-03-10T22:22:58Z ++ let retry+=1 ++ [[ 1 -gt 30 ]] ++ [[ 2026-03-10T22:22:58Z != '' ]] ++ [[ 2026-03-10T22:22:58Z != \n\u\l\l ]] ++ echo 2026-03-10T22:22:58Z + backup_time=2026-03-10T22:22:58Z + [[ 2026-03-10T22:22:58Z != \2\0\2\6\-\0\3\-\1\0\T\2\2\:\2\2\:\5\8\Z ]] ++ run_mongo 'new Date().toISOString()' 
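
compare_latest_restorable_time, traced above, polls PBM until two consecutive reads of the newest PITR chunk agree and then checks that the operator published the same instant on the backup object. Reduced to a single-pass sketch (pod name, backup name and jq path copied from this run; the helper's retry logic is omitted):

chunk_end=$(kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json \
  | jq '.backups.pitrChunks.pitrChunks | last | .range.end')          # epoch seconds
pbm_time=$(date -u -d "@${chunk_end}" +%Y-%m-%dT%H:%M:%SZ)            # operator's timestamp format
cr_time=$(kubectl get psmdb-backup backup-minio-2 -o jsonpath='{.status.latestRestorableTime}')
if [ "${pbm_time}" != "${cr_time}" ]; then
  echo "latestRestorableTime mismatch: pbm=${pbm_time} cr=${cr_time}" >&2
  exit 1
fi

In this run both sides resolve to 2026-03-10T22:22:58Z, so the check passes.
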
myApp:myPass@some-name-rs0.pitr-32420 mongodb '' --quiet ++ local 'command=new Date().toISOString()' ++ local uri=myApp:myPass@some-name-rs0.pitr-32420 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ grep -E -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match' ++ local replica_set=rs0 ++ [[ myApp:myPass@some-name-rs0.pitr-32420 == *cfg* ]] ++ cut -c1-19 ++ tr T ' ' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.EU7pT9r1oE ++++ mktemp +++ local LAST_ERR=/tmp/tmp.l228M7Yhkr +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.EU7pT9r1oE +++ cat /tmp/tmp.l228M7Yhkr +++ rm /tmp/tmp.EU7pT9r1oE /tmp/tmp.l228M7Yhkr +++ return 0 ++ local client_container=psmdb-client-bb8b97679-jlqq8 ++ kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''new Date().toISOString()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Teqb64iMcm +++ mktemp ++ local LAST_ERR=/tmp/tmp.YVmK23oqDm ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''new Date().toISOString()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Teqb64iMcm ++ cat /tmp/tmp.YVmK23oqDm ++ rm /tmp/tmp.Teqb64iMcm /tmp/tmp.YVmK23oqDm ++ return 0 + time_now='2026-03-10 22:24:13' + check_recovery backup-minio-2 date '2026-03-10 22:24:13' '' backupSource some-name + local backup_name=backup-minio-2 + local restore_type=date + local 'restore_date=2026-03-10 22:24:13' + local cmp_postfix= + local backupSource=backupSource + local cluster_name=some-name + local selective_collection= + local restore_name=restore-backup-minio-2 + local restore_file=restore.yml + local cluster + cluster=some-name-rs0 + desc 'write more data before restore by date' + set +o xtrace ----------------------------------------------------------------------------------- write more data before restore by date ----------------------------------------------------------------------------------- + sleep 60 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.pitr-32420 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.pitr-32420 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.pitr-32420 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bLHHPDmiOK +++ mktemp ++ local LAST_ERR=/tmp/tmp.42qt3Lvbvu ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bLHHPDmiOK ++ cat /tmp/tmp.42qt3Lvbvu ++ rm /tmp/tmp.bLHHPDmiOK 
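
The restore-by-date target captured above is simply the current time as the cluster sees it: new Date().toISOString() is run through the mongo shell with --quiet, then trimmed to whole seconds and the ISO 'T' replaced by a space, giving the 'YYYY-MM-DD HH:MM:SS' form the restore manifest expects. In isolation (client pod and URI from this run, grep filter abridged):

time_now=$(kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c \
  'printf "new Date().toISOString()\n" | mongo "mongodb://myApp:myPass@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false&replicaSet=rs0" --quiet' \
  | grep -E -v 'I NETWORK|W NETWORK|Implicit session:|versions do not match' \
  | cut -c1-19 | tr T ' ')
echo "${time_now}"    # 2026-03-10 22:24:13 in this run
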
/tmp/tmp.42qt3Lvbvu ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.hUcCUL4Hx5 ++ mktemp + local LAST_ERR=/tmp/tmp.GsYbAWKJIO + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.hUcCUL4Hx5 Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.pitr-32420.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.pitr-32420.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.pitr-32420.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("5e6ec4c6-4eb8-42e6-a534-d3125da0e2a5") } Percona Server for MongoDB server version: v8.0.19-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.GsYbAWKJIO + rm /tmp/tmp.hUcCUL4Hx5 /tmp/tmp.GsYbAWKJIO + return 0 + '[' -n '' ']' + desc 'waiting for chunks to be uploaded' + set +o xtrace ----------------------------------------------------------------------------------- waiting for chunks to be uploaded ----------------------------------------------------------------------------------- + sleep 120 + desc 'check restore by date' + set +o xtrace ----------------------------------------------------------------------------------- check restore by date ----------------------------------------------------------------------------------- + '[' -z backupSource ']' ++ get_backup_dest backup-minio-2 ++ local backup_name=backup-minio-2 ++ kubectl_bin get psmdb-backup backup-minio-2 -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' +++ mktemp ++ sed 's|s3://||' ++ local LAST_OUT=/tmp/tmp.MF3YoZgXVG +++ mktemp ++ sed 's|azure://||' ++ sed 's|gs://||' ++ local LAST_ERR=/tmp/tmp.cGXkWJP9Ma ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup backup-minio-2 -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.MF3YoZgXVG ++ cat /tmp/tmp.cGXkWJP9Ma ++ rm /tmp/tmp.MF3YoZgXVG /tmp/tmp.cGXkWJP9Ma ++ return 0 + backup_dest=operator-testing/pitr-prefix-1/2026-03-10T22:23:44Z + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-minio-2/' + /usr/sbin/sed -e /backupName/d + /usr/sbin/sed -e 's/pitrType:/type: date/' + /usr/sbin/sed -e 's/clusterName:/clusterName: some-name/' + '[' -z '2026-03-10 22:24:13' ']' + /usr/sbin/sed -e 's/date:/date: 2026-03-10 22:24:13/' + /usr/sbin/sed -e 's|BACKUP-NAME|operator-testing/pitr-prefix-1/2026-03-10T22:23:44Z|' + '[' -n '' ']' + yq + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.D2frQZ6GZM ++ mktemp + local LAST_ERR=/tmp/tmp.eewDXgDifM + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat 
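
check_recovery renders the restore object by piping e2e-tests/pitr/conf/restore.yml through the sed substitutions shown above. The template itself is never printed in the log, so the manifest below is only a reconstruction inferred from the substituted keys and the object the operator reports creating; the field layout under spec is an assumption based on the PerconaServerMongoDBRestore schema:

kubectl apply -f - <<'EOF'
apiVersion: psmdb.percona.com/v1        # assumed; the log only shows the group psmdb.percona.com
kind: PerconaServerMongoDBRestore
metadata:
  name: restore-backup-minio-2
spec:
  clusterName: some-name
  backupSource:
    destination: operator-testing/pitr-prefix-1/2026-03-10T22:23:44Z   # substituted for BACKUP-NAME; any scheme prefix lives in the template
  pitr:
    type: date
    date: 2026-03-10 22:24:13
EOF

Because backupSource carries the destination directly, the restore does not reference a psmdb-backup object by name (the /backupName/d sed removes that line from the template).
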
/tmp/tmp.D2frQZ6GZM perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-2 created + cat /tmp/tmp.eewDXgDifM + rm /tmp/tmp.D2frQZ6GZM /tmp/tmp.eewDXgDifM + return 0 + wait_restore backup-minio-2 some-name + local backup_name=backup-minio-2 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-2 object to be created.OK Waiting psmdb-restore/restore-backup-minio-2 to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sEHwQHAidn +++ mktemp ++ local LAST_ERR=/tmp/tmp.WrgZETk69o ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.sEHwQHAidn ++ cat /tmp/tmp.WrgZETk69o ++ rm /tmp/tmp.sEHwQHAidn /tmp/tmp.WrgZETk69o ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + echo + set -o xtrace + '[' -n '' ']' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 '' + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:27:45+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SesqoCX3tA +++ mktemp ++ local LAST_ERR=/tmp/tmp.MzSH6cV9bJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.SesqoCX3tA ++ cat /tmp/tmp.MzSH6cV9bJ ++ rm /tmp/tmp.SesqoCX3tA /tmp/tmp.MzSH6cV9bJ ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.YbJhkaojnP ++ mktemp + local LAST_ERR=/tmp/tmp.R8g2CRNpxc + local exit_status=0 + local timeout=4 ++ 
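
wait_restore and wait_cluster_consistency, which follow the apply above, both come down to polling a status field until the operator reports ready; the cluster-side check is visible in the trace as repeated reads of .status.state. A minimal equivalent loop (the retry budget here is an assumption, the helpers' own limits are not traced):

for _ in $(seq 1 60); do
  state=$(kubectl get psmdb some-name -o jsonpath='{.status.state}')
  [ "${state}" = "ready" ] && break
  sleep 10
done
[ "${state}" = "ready" ] || { echo "cluster some-name did not become ready" >&2; exit 1; }
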
seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.YbJhkaojnP + cat /tmp/tmp.R8g2CRNpxc + rm /tmp/tmp.YbJhkaojnP /tmp/tmp.R8g2CRNpxc + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find.json /tmp/tmp.oRDgtyLOf2/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 '' + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:27:48+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 mongodb '' '' + local 'command=use myApp\n db.test.find()' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WklGmN8ttd +++ mktemp ++ local LAST_ERR=/tmp/tmp.KlELuWp5F2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.WklGmN8ttd ++ cat /tmp/tmp.KlELuWp5F2 ++ rm /tmp/tmp.WklGmN8ttd /tmp/tmp.KlELuWp5F2 ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.toWMy31kJc ++ mktemp + local LAST_ERR=/tmp/tmp.ajC8gLE8CU + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.toWMy31kJc + cat /tmp/tmp.ajC8gLE8CU + rm /tmp/tmp.toWMy31kJc /tmp/tmp.ajC8gLE8CU + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find.json /tmp/tmp.oRDgtyLOf2/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 '' + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 + local postfix= + local suffix= + 
local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:27:51+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XFyioZJwZb +++ mktemp ++ local LAST_ERR=/tmp/tmp.rJ9VAXZtBj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.XFyioZJwZb ++ cat /tmp/tmp.rJ9VAXZtBj ++ rm /tmp/tmp.XFyioZJwZb /tmp/tmp.rJ9VAXZtBj ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.sUvgWgTPIR ++ mktemp + local LAST_ERR=/tmp/tmp.3sgivzya8d + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.sUvgWgTPIR + cat /tmp/tmp.3sgivzya8d + rm /tmp/tmp.sUvgWgTPIR /tmp/tmp.3sgivzya8d + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find.json /tmp/tmp.oRDgtyLOf2/find + run_backup backup-minio 3 + local name=backup-minio + local idx=3 + desc 'run backup backup-minio-3' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio-3 ----------------------------------------------------------------------------------- + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/conf/backup-minio.yml + /usr/sbin/sed -e 's/name:/name: backup-minio-3/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.tqnPFuPnAJ ++ mktemp + local LAST_ERR=/tmp/tmp.tf9d9CtfbE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.tqnPFuPnAJ perconaservermongodbbackup.psmdb.percona.com/backup-minio-3 created + cat /tmp/tmp.tf9d9CtfbE + rm /tmp/tmp.tqnPFuPnAJ /tmp/tmp.tf9d9CtfbE + return 0 + wait_backup backup-minio-3 + local 
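
run_backup, as traced above, only renames the shared template and applies it; the wait_backup loop that follows runs with xtrace disabled, so its body never appears in the log. Written out explicitly, and with the status.state poll being an assumption about what wait_backup does rather than a copy of it:

sed -e 's/name:/name: backup-minio-3/' e2e-tests/pitr/conf/backup-minio.yml | kubectl apply -f -
# Wait for PBM to finish uploading the snapshot before relying on it.
until [ "$(kubectl get psmdb-backup backup-minio-3 -o jsonpath='{.status.state}')" = "ready" ]; do
  sleep 5
done
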
backup_name=backup-minio-3 + local target_state=ready + set +o xtrace waiting for backup-minio-3 to reach ready state....OK + compare_latest_restorable_time some-name-rs0 backup-minio-3 + local cluster=some-name-rs0 + local backup_name=backup-minio-3 + local latest_restorable_time + local backup_time ++ get_latest_restorable_time some-name-rs0 ++ local cluster=some-name-rs0 ++ local first_timestamp ++ local second_timestamp ++ local retry=0 ++ [[ '' != '' ]] +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.OxP8mH4BxH ++++ mktemp +++ local LAST_ERR=/tmp/tmp.737PdOzaiu +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.OxP8mH4BxH +++ cat /tmp/tmp.737PdOzaiu +++ rm /tmp/tmp.OxP8mH4BxH /tmp/tmp.737PdOzaiu +++ return 0 ++ first_timestamp=1773181643 ++ sleep 5 ++ [[ 1773181643 != '' ]] ++ [[ 1773181643 != \n\u\l\l ]] +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.sjqwM70Qqr ++++ mktemp +++ local LAST_ERR=/tmp/tmp.c49Z51gwsM +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.sjqwM70Qqr +++ cat /tmp/tmp.c49Z51gwsM +++ rm /tmp/tmp.sjqwM70Qqr /tmp/tmp.c49Z51gwsM +++ return 0 ++ second_timestamp=1773181643 ++ let retry+=1 ++ [[ 1 -gt 30 ]] ++ [[ 1773181643 != '' ]] ++ [[ 1773181643 != \n\u\l\l ]] ++ [[ 1773181643 == 1773181643 ]] ++ /usr/sbin/date -u -d @1773181643 +%Y-%m-%dT%H:%M:%SZ + latest_restorable_time=2026-03-10T22:27:23Z ++ get_latest_restorable_time_from_backup_object backup-minio-3 ++ local backup_name=backup-minio-3 ++ local latestRestorableTime ++ local retry=0 ++ [[ '' != '' ]] ++ sleep 5 +++ kubectl_bin get psmdb-backup backup-minio-3 -o 'jsonpath={.status.latestRestorableTime}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.t5Pv431O2T ++++ mktemp +++ local LAST_ERR=/tmp/tmp.sYWdhBVoUh +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get psmdb-backup backup-minio-3 -o 'jsonpath={.status.latestRestorableTime}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.t5Pv431O2T +++ cat /tmp/tmp.sYWdhBVoUh +++ rm /tmp/tmp.t5Pv431O2T /tmp/tmp.sYWdhBVoUh +++ return 0 ++ latestRestorableTime=2026-03-10T22:27:23Z ++ let retry+=1 ++ [[ 1 -gt 30 ]] ++ [[ 2026-03-10T22:27:23Z != '' ]] ++ [[ 2026-03-10T22:27:23Z != \n\u\l\l ]] ++ echo 2026-03-10T22:27:23Z + backup_time=2026-03-10T22:27:23Z + [[ 2026-03-10T22:27:23Z != \2\0\2\6\-\0\3\-\1\0\T\2\2\:\2\7\:\2\3\Z ]] + check_recovery backup-minio-3 latest '' -4th backupSource some-name + local backup_name=backup-minio-3 + local restore_type=latest + local restore_date= + local cmp_postfix=-4th + local backupSource=backupSource + local cluster_name=some-name + local selective_collection= + local restore_name=restore-backup-minio-3 + local restore_file=restore.yml + local cluster + cluster=some-name-rs0 + desc 'write more data before restore by latest' + set +o xtrace 
----------------------------------------------------------------------------------- write more data before restore by latest ----------------------------------------------------------------------------------- + sleep 60 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.pitr-32420 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.pitr-32420 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.pitr-32420 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JSvC5aLqcX +++ mktemp ++ local LAST_ERR=/tmp/tmp.2CF1GLCCUt ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JSvC5aLqcX ++ cat /tmp/tmp.2CF1GLCCUt ++ rm /tmp/tmp.JSvC5aLqcX /tmp/tmp.2CF1GLCCUt ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.sD0HGtCz5n ++ mktemp + local LAST_ERR=/tmp/tmp.sElWcL4szL + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.sD0HGtCz5n Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.pitr-32420.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.pitr-32420.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-32420.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("407708ed-67c8-4c68-9e89-1ddbacee66eb") } Percona Server for MongoDB server version: v8.0.19-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.sElWcL4szL + rm /tmp/tmp.sD0HGtCz5n /tmp/tmp.sElWcL4szL + return 0 + '[' -n '' ']' + desc 'waiting for chunks to be uploaded' + set +o xtrace ----------------------------------------------------------------------------------- waiting for chunks to be uploaded ----------------------------------------------------------------------------------- + sleep 120 + desc 'check restore by latest' + set +o xtrace ----------------------------------------------------------------------------------- check restore by latest ----------------------------------------------------------------------------------- + '[' -z backupSource ']' ++ get_backup_dest backup-minio-3 ++ local backup_name=backup-minio-3 ++ kubectl_bin get psmdb-backup backup-minio-3 -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' ++ sed 's|s3://||' +++ mktemp ++ local LAST_OUT=/tmp/tmp.74pCEEBQ19 ++ sed 's|azure://||' ++ sed 's|gs://||' +++ mktemp ++ local LAST_ERR=/tmp/tmp.jEzJsLHAPd ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ 
kubectl get psmdb-backup backup-minio-3 -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.74pCEEBQ19 ++ cat /tmp/tmp.jEzJsLHAPd ++ rm /tmp/tmp.74pCEEBQ19 /tmp/tmp.jEzJsLHAPd ++ return 0 + backup_dest=operator-testing/pitr-prefix-1/2026-03-10T22:27:57Z + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-minio-3/' + /usr/sbin/sed -e /backupName/d + /usr/sbin/sed -e 's|BACKUP-NAME|operator-testing/pitr-prefix-1/2026-03-10T22:27:57Z|' + /usr/sbin/sed -e 's/clusterName:/clusterName: some-name/' + /usr/sbin/sed -e 's/pitrType:/type: latest/' + kubectl_bin apply -f - + '[' -n '' ']' + yq + '[' -z '' ']' + /usr/sbin/sed -e /date:/d ++ mktemp + local LAST_OUT=/tmp/tmp.IiAD4kouTF ++ mktemp + local LAST_ERR=/tmp/tmp.SBCNlkhqvJ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.IiAD4kouTF perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-3 created + cat /tmp/tmp.SBCNlkhqvJ + rm /tmp/tmp.IiAD4kouTF /tmp/tmp.SBCNlkhqvJ + return 0 + wait_restore backup-minio-3 some-name + local backup_name=backup-minio-3 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-3 object to be created.OK Waiting psmdb-restore/restore-backup-minio-3 to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Wh9xPfkTWg +++ mktemp ++ local LAST_ERR=/tmp/tmp.WK9wiSs5kK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Wh9xPfkTWg ++ cat /tmp/tmp.WK9wiSs5kK ++ rm /tmp/tmp.Wh9xPfkTWg /tmp/tmp.WK9wiSs5kK ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + echo + set -o xtrace + '[' -n '' ']' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 -4th + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 + local postfix=-4th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:31:55+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 mongodb '' '' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ 
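
The 'latest' variant above reuses the same restore template: the /date:/d sed drops the target timestamp entirely and pitrType becomes type: latest, so PBM replays the oplog up to the newest uploaded chunk instead of to a fixed point. With the same caveats as the earlier date-based reconstruction (the template is not shown in the log), the rendered object is roughly:

kubectl apply -f - <<'EOF'
apiVersion: psmdb.percona.com/v1        # assumed
kind: PerconaServerMongoDBRestore
metadata:
  name: restore-backup-minio-3
spec:
  clusterName: some-name
  backupSource:
    destination: operator-testing/pitr-prefix-1/2026-03-10T22:27:57Z   # BACKUP-NAME substitution from this run
  pitr:
    type: latest        # no date field: the sed deletes it
EOF
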
myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WoZ3fTLkk7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.yKSgfOl8Hm ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.WoZ3fTLkk7 ++ cat /tmp/tmp.yKSgfOl8Hm ++ rm /tmp/tmp.WoZ3fTLkk7 /tmp/tmp.yKSgfOl8Hm ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.DLcmbvoCnI ++ mktemp + local LAST_ERR=/tmp/tmp.LgiPT99WGk + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DLcmbvoCnI + cat /tmp/tmp.LgiPT99WGk + rm /tmp/tmp.DLcmbvoCnI /tmp/tmp.LgiPT99WGk + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.oRDgtyLOf2/find-4th + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 -4th + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 + local postfix=-4th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:31:57+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vnDArCo0He +++ mktemp ++ local LAST_ERR=/tmp/tmp.aGfTmxq9yD ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.vnDArCo0He ++ cat /tmp/tmp.aGfTmxq9yD ++ rm /tmp/tmp.vnDArCo0He /tmp/tmp.aGfTmxq9yD ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec 
psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.VsRrNgwkKu ++ mktemp + local LAST_ERR=/tmp/tmp.Eqj8RmAWfr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.VsRrNgwkKu + cat /tmp/tmp.Eqj8RmAWfr + rm /tmp/tmp.VsRrNgwkKu /tmp/tmp.Eqj8RmAWfr + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.oRDgtyLOf2/find-4th + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 -4th + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 + local postfix=-4th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:32:00+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Pa0Y9BySoY +++ mktemp ++ local LAST_ERR=/tmp/tmp.rWiCXYNFYh ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Pa0Y9BySoY ++ cat /tmp/tmp.rWiCXYNFYh ++ rm /tmp/tmp.Pa0Y9BySoY /tmp/tmp.rWiCXYNFYh ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.1rvUMCy4rN ++ mktemp + local LAST_ERR=/tmp/tmp.eWSLfKf4S4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.1rvUMCy4rN + cat /tmp/tmp.eWSLfKf4S4 + rm /tmp/tmp.1rvUMCy4rN 
/tmp/tmp.eWSLfKf4S4 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.oRDgtyLOf2/find-4th + desc 'restore pitr using backupSource on second cluster with another prefix in storage' + set +o xtrace ----------------------------------------------------------------------------------- restore pitr using backupSource on second cluster with another prefix in storage ----------------------------------------------------------------------------------- + second_cluster=cluster2-rs0 + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/conf/cluster2-rs0.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/conf/cluster2-rs0.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/conf/cluster2-rs0.yml + yq eval '(.spec | select(.image == null)).image = "docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0"' ++ mktemp + yq eval '(.spec | select(has("initImage"))).initImage = "docker.io/perconalab/percona-server-mongodb-operator:PR-2282-535b6b53"' + yq eval '(.spec | select(has("backup"))).backup.image = "docker.io/perconalab/percona-server-mongodb-operator:main-backup"' + yq eval '(.spec | select(has("pmm"))).pmm.image = "docker.io/percona/pmm-client:2.44.1-1"' + /usr/sbin/sed -e s/NAME_SPACE/pitr-32420/g + yq eval '.spec.upgradeOptions.apply="Never"' + local LAST_OUT=/tmp/tmp.7p9CdldO8M ++ mktemp + local LAST_ERR=/tmp/tmp.HeZLCF6zZV + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.7p9CdldO8M perconaservermongodb.psmdb.percona.com/cluster2 created + cat /tmp/tmp.HeZLCF6zZV + rm /tmp/tmp.7p9CdldO8M /tmp/tmp.HeZLCF6zZV + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- + wait_for_running cluster2-rs0 3 + local name=cluster2-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=cluster2 ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod cluster2-rs0-0 + local pod=cluster2-rs0-0 + set +o xtrace waiting for pod/cluster2-rs0-0 to be ready.................OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod cluster2-rs0-1 + local pod=cluster2-rs0-1 + set +o xtrace waiting for pod/cluster2-rs0-1 to be ready.......OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb cluster2 -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3NbFpbIjBL +++ mktemp ++ local LAST_ERR=/tmp/tmp.oPxruG9Dz5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb cluster2 -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3NbFpbIjBL ++ cat /tmp/tmp.oPxruG9Dz5 ++ rm /tmp/tmp.3NbFpbIjBL /tmp/tmp.oPxruG9Dz5 ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod cluster2-rs0-2 + local pod=cluster2-rs0-2 + set +o xtrace waiting for pod/cluster2-rs0-2 to be ready.....OK ++ kubectl_bin get psmdb cluster2 -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp 
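
apply_cluster does not apply the cluster2 manifest verbatim: as the trace above shows, it first pins the mongod, init, backup and PMM images and the namespace for this CI run with a chain of yq edits. Condensed into one pipeline (every expression is taken from the log):

cat e2e-tests/pitr/conf/cluster2-rs0.yml \
  | yq eval '(.spec | select(.image == null)).image = "docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0"' \
  | yq eval '(.spec | select(has("initImage"))).initImage = "docker.io/perconalab/percona-server-mongodb-operator:PR-2282-535b6b53"' \
  | yq eval '(.spec | select(has("backup"))).backup.image = "docker.io/perconalab/percona-server-mongodb-operator:main-backup"' \
  | yq eval '(.spec | select(has("pmm"))).pmm.image = "docker.io/percona/pmm-client:2.44.1-1"' \
  | yq eval '.spec.upgradeOptions.apply="Never"' \
  | sed -e 's/NAME_SPACE/pitr-32420/g' \
  | kubectl apply -f -

Setting upgradeOptions.apply to Never keeps the operator from swapping images mid-test via its version service.
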
++ local LAST_OUT=/tmp/tmp.2zz1IJwGQK +++ mktemp ++ local LAST_ERR=/tmp/tmp.fdBg53SBxH ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb cluster2 -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.2zz1IJwGQK ++ cat /tmp/tmp.fdBg53SBxH ++ rm /tmp/tmp.2zz1IJwGQK /tmp/tmp.fdBg53SBxH ++ return 0 + [[ '' == \t\r\u\e ]] ++ kubectl_bin get psmdb cluster2 -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.loCfjd6lzq +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ed7hmONR5B ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb cluster2 -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.loCfjd6lzq ++ cat /tmp/tmp.Ed7hmONR5B ++ rm /tmp/tmp.loCfjd6lzq /tmp/tmp.Ed7hmONR5B ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness.. + wait_backup_agent cluster2-rs0-0 + local agent_pod=cluster2-rs0-0 + set +o xtrace waiting for pbm-agent to be ready in cluster2-rs0-0...2026-03-10T22:33:33.000+0000 I listening for the commands + wait_backup_agent cluster2-rs0-1 + local agent_pod=cluster2-rs0-1 + set +o xtrace waiting for pbm-agent to be ready in cluster2-rs0-1...2026-03-10T22:33:44.000+0000 I listening for the commands + wait_backup_agent cluster2-rs0-2 + local agent_pod=cluster2-rs0-2 + set +o xtrace waiting for pbm-agent to be ready in cluster2-rs0-2...2026-03-10T22:33:47.000+0000 I listening for the commands + desc 'create user myApp' + set +o xtrace ----------------------------------------------------------------------------------- create user myApp ----------------------------------------------------------------------------------- + run_mongo 'db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' userAdmin:userAdmin123456@cluster2-rs0.pitr-32420 + local 'command=db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' + local uri=userAdmin:userAdmin123456@cluster2-rs0.pitr-32420 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ userAdmin:userAdmin123456@cluster2-rs0.pitr-32420 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nOTqOfegKl +++ mktemp ++ local LAST_ERR=/tmp/tmp.01m5ICOOMW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.nOTqOfegKl ++ cat /tmp/tmp.01m5ICOOMW ++ rm /tmp/tmp.nOTqOfegKl /tmp/tmp.01m5ICOOMW ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.rRCtsgBD7s ++ mktemp + local LAST_ERR=/tmp/tmp.7LdcHVgw5h + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf 
'\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.rRCtsgBD7s Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://cluster2-rs0-1.cluster2-rs0.pitr-32420.svc.cluster.local:27017,cluster2-rs0-2.cluster2-rs0.pitr-32420.svc.cluster.local:27017,cluster2-rs0-0.cluster2-rs0.pitr-32420.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("0fb2741e-4dcc-49ff-802b-2c999b76f51f") } Percona Server for MongoDB server version: v8.0.19-7 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] } bye + cat /tmp/tmp.7LdcHVgw5h + rm /tmp/tmp.rRCtsgBD7s /tmp/tmp.7LdcHVgw5h + return 0 + sleep 2 + check_recovery backup-minio-3 latest '' -4th backupSource cluster2 '' backup-minio-3-second-cluster restore2.yml + local backup_name=backup-minio-3 + local restore_type=latest + local restore_date= + local cmp_postfix=-4th + local backupSource=backupSource + local cluster_name=cluster2 + local selective_collection= + local restore_name=restore-backup-minio-3-second-cluster + local restore_file=restore2.yml + local cluster + cluster=cluster2-rs0 + desc 'write more data before restore by latest' + set +o xtrace ----------------------------------------------------------------------------------- write more data before restore by latest ----------------------------------------------------------------------------------- + sleep 60 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@cluster2-rs0.pitr-32420 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@cluster2-rs0.pitr-32420 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@cluster2-rs0.pitr-32420 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SaTj5X11dJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.nu2pztUsao ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.SaTj5X11dJ ++ cat /tmp/tmp.nu2pztUsao ++ rm /tmp/tmp.SaTj5X11dJ /tmp/tmp.nu2pztUsao ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.DP3oibym6S ++ mktemp + local LAST_ERR=/tmp/tmp.EztUtd4vZa + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DP3oibym6S Percona Server for MongoDB shell version v4.4.29-28 connecting to: 
mongodb://cluster2-rs0-1.cluster2-rs0.pitr-32420.svc.cluster.local:27017,cluster2-rs0-2.cluster2-rs0.pitr-32420.svc.cluster.local:27017,cluster2-rs0-0.cluster2-rs0.pitr-32420.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("ded1d4e1-8b86-478a-8da8-78f36859ea3e") } Percona Server for MongoDB server version: v8.0.19-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.EztUtd4vZa + rm /tmp/tmp.DP3oibym6S /tmp/tmp.EztUtd4vZa + return 0 + '[' -n '' ']' + desc 'waiting for chunks to be uploaded' + set +o xtrace ----------------------------------------------------------------------------------- waiting for chunks to be uploaded ----------------------------------------------------------------------------------- + sleep 120 + desc 'check restore by latest' + set +o xtrace ----------------------------------------------------------------------------------- check restore by latest ----------------------------------------------------------------------------------- + '[' -z backupSource ']' ++ get_backup_dest backup-minio-3 ++ local backup_name=backup-minio-3 ++ kubectl_bin get psmdb-backup backup-minio-3 -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' ++ sed 's|s3://||' +++ mktemp ++ sed 's|azure://||' ++ sed 's|gs://||' ++ local LAST_OUT=/tmp/tmp.RdJQMErQei +++ mktemp ++ local LAST_ERR=/tmp/tmp.2ZFq34E27A ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup backup-minio-3 -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.RdJQMErQei ++ cat /tmp/tmp.2ZFq34E27A ++ rm /tmp/tmp.RdJQMErQei /tmp/tmp.2ZFq34E27A ++ return 0 + backup_dest=operator-testing/pitr-prefix-1/2026-03-10T22:27:57Z + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/conf/restore2.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-minio-3-second-cluster/' + /usr/sbin/sed -e /backupName/d + /usr/sbin/sed -e 's/clusterName:/clusterName: cluster2/' + /usr/sbin/sed -e 's/pitrType:/type: latest/' + '[' -z '' ']' + /usr/sbin/sed -e /date:/d + kubectl_bin apply -f - + /usr/sbin/sed -e 's|BACKUP-NAME|operator-testing/pitr-prefix-1/2026-03-10T22:27:57Z|' ++ mktemp + '[' -n '' ']' + yq + local LAST_OUT=/tmp/tmp.r3sQaibpBj ++ mktemp + local LAST_ERR=/tmp/tmp.tFTQWXMM1p + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.r3sQaibpBj perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-3-second-cluster created + cat /tmp/tmp.tFTQWXMM1p + rm /tmp/tmp.r3sQaibpBj /tmp/tmp.tFTQWXMM1p + return 0 + wait_restore backup-minio-3-second-cluster cluster2 + local backup_name=backup-minio-3-second-cluster + local cluster_name=cluster2 + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-3-second-cluster object to be created.OK Waiting psmdb-restore/restore-backup-minio-3-second-cluster to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency cluster2 + local cluster_name=cluster2 + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb cluster2 -o 'jsonpath={.status.state}' +++ 
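
The final scenario restores onto cluster2 a backup that was taken from some-name. Because the restore uses backupSource with an explicit destination (bucket, prefix and backup path on the shared minio storage), cluster2 needs no psmdb-backup object of its own, only access to the same storage; the test uses a separate template, restore2.yml, whose contents are likewise not shown in the log. A sketch of the two steps with the names from this run (spec layout assumed, as before; the sed mirrors get_backup_dest for the s3 case):

dest=$(kubectl get psmdb-backup backup-minio-3 -o jsonpath='{.status.destination}' | sed 's|s3://||')
kubectl apply -f - <<EOF
apiVersion: psmdb.percona.com/v1        # assumed
kind: PerconaServerMongoDBRestore
metadata:
  name: restore-backup-minio-3-second-cluster
spec:
  clusterName: cluster2
  backupSource:
    destination: ${dest}
  pitr:
    type: latest
EOF
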
mktemp ++ local LAST_OUT=/tmp/tmp.dfsLiy3MjZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.fQop0EKdzK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb cluster2 -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dfsLiy3MjZ ++ cat /tmp/tmp.fQop0EKdzK ++ rm /tmp/tmp.dfsLiy3MjZ /tmp/tmp.fQop0EKdzK ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + echo + set -o xtrace + '[' -n '' ']' + compare_mongo_cmd find myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-32420 -4th + local command=find + local uri=myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-32420 + local postfix=-4th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:37:28+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-32420 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-32420 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-32420 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nW7wCPkRH2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.k5rlzaNcOs ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.nW7wCPkRH2 ++ cat /tmp/tmp.k5rlzaNcOs ++ rm /tmp/tmp.nW7wCPkRH2 /tmp/tmp.k5rlzaNcOs ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.JfowskY7Ih ++ mktemp + local LAST_ERR=/tmp/tmp.0TKUb2CCZf + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.JfowskY7Ih + cat /tmp/tmp.0TKUb2CCZf + rm /tmp/tmp.JfowskY7Ih /tmp/tmp.0TKUb2CCZf + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.oRDgtyLOf2/find-4th + compare_mongo_cmd find myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-32420 -4th + local command=find + local uri=myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-32420 + local postfix=-4th + local suffix= + local database=myApp + local collection=test + local sort= + local 
tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:37:31+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-32420 mongodb '' '' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-32420 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-32420 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.99EbFa5lHw +++ mktemp ++ local LAST_ERR=/tmp/tmp.o4xHWASvIH ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.99EbFa5lHw ++ cat /tmp/tmp.o4xHWASvIH ++ rm /tmp/tmp.99EbFa5lHw /tmp/tmp.o4xHWASvIH ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.4XBVGHbQwa ++ mktemp + local LAST_ERR=/tmp/tmp.o6OI4MYEJX + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.4XBVGHbQwa + cat /tmp/tmp.o6OI4MYEJX + rm /tmp/tmp.4XBVGHbQwa /tmp/tmp.o6OI4MYEJX + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.oRDgtyLOf2/find-4th + compare_mongo_cmd find myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-32420 -4th + local command=find + local uri=myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-32420 + local postfix=-4th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:37:33+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-32420 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-32420 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-32420 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to 
reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.C99OHqVVir +++ mktemp ++ local LAST_ERR=/tmp/tmp.lFIwy2ve1H ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.C99OHqVVir ++ cat /tmp/tmp.lFIwy2ve1H ++ rm /tmp/tmp.C99OHqVVir /tmp/tmp.lFIwy2ve1H ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.7PMWea3OJc ++ mktemp + local LAST_ERR=/tmp/tmp.w5zNS9oVko + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.7PMWea3OJc + cat /tmp/tmp.w5zNS9oVko + rm /tmp/tmp.7PMWea3OJc /tmp/tmp.w5zNS9oVko + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.oRDgtyLOf2/find-4th + backup_name_minio_2=backup-minio2 + run_backup backup-minio2 0 + local name=backup-minio2 + local idx=0 + desc 'run backup backup-minio2-0' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio2-0 ----------------------------------------------------------------------------------- + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/conf/backup-minio2.yml + /usr/sbin/sed -e 's/name:/name: backup-minio2-0/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.zRgeHHi87s ++ mktemp + local LAST_ERR=/tmp/tmp.qcZFzN6bv6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.zRgeHHi87s perconaservermongodbbackup.psmdb.percona.com/backup-minio2-0 created + cat /tmp/tmp.qcZFzN6bv6 + rm /tmp/tmp.zRgeHHi87s /tmp/tmp.qcZFzN6bv6 + return 0 + wait_backup backup-minio2-0 + local backup_name=backup-minio2-0 + local target_state=ready + set +o xtrace waiting for backup-minio2-0 to reach ready state.......OK ++ run_mongo 'new Date().toISOString()' myApp:myPass@cluster2-rs0.pitr-32420 mongodb '' --quiet ++ local 'command=new Date().toISOString()' ++ local uri=myApp:myPass@cluster2-rs0.pitr-32420 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ local replica_set=rs0 ++ grep -E -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match' ++ [[ myApp:myPass@cluster2-rs0.pitr-32420 == *cfg* ]] ++ cut -c1-19 ++ tr T ' ' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local 
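# [editor's note] run_backup above renders e2e-tests/pitr/conf/backup-minio2.yml, stamps the
# name via sed, applies it, and then wait_backup polls the psmdb-backup until it reaches the
# "ready" state. A minimal sketch of the applied object and an equivalent wait (the spec
# fields clusterName/storageName are assumptions based on the test layout, not the actual
# contents of backup-minio2.yml; the test itself polls in a shell loop rather than using
# kubectl wait):
#
#   kubectl apply -f - <<'EOF'
#   apiVersion: psmdb.percona.com/v1
#   kind: PerconaServerMongoDBBackup
#   metadata:
#     name: backup-minio2-0
#   spec:
#     clusterName: cluster2      # assumption: the second cluster in the pitr test
#     storageName: minio         # assumption: the MinIO storage defined in the cr
#   EOF
#   kubectl wait psmdb-backup/backup-minio2-0 --for=jsonpath='{.status.state}'=ready --timeout=10m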
LAST_OUT=/tmp/tmp.yLOsVoyauH ++++ mktemp +++ local LAST_ERR=/tmp/tmp.cwIqkTDSA9 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.yLOsVoyauH +++ cat /tmp/tmp.cwIqkTDSA9 +++ rm /tmp/tmp.yLOsVoyauH /tmp/tmp.cwIqkTDSA9 +++ return 0 ++ local client_container=psmdb-client-bb8b97679-jlqq8 ++ kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''new Date().toISOString()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FDGRAC0ZSO +++ mktemp ++ local LAST_ERR=/tmp/tmp.15FIAyLFhU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''new Date().toISOString()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.FDGRAC0ZSO ++ cat /tmp/tmp.15FIAyLFhU ++ rm /tmp/tmp.FDGRAC0ZSO /tmp/tmp.15FIAyLFhU ++ return 0 + time_now='2026-03-10 22:37:53' + write_document -5th cluster2-rs0 + local cmp_postfix=-5th + local cluster_name=cluster2-rs0 + desc 'write initial data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write initial data, read from all ----------------------------------------------------------------------------------- + run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@cluster2-rs0.pitr-32420 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@cluster2-rs0.pitr-32420 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@cluster2-rs0.pitr-32420 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QeRoKdVQFv +++ mktemp ++ local LAST_ERR=/tmp/tmp.keFuWe9wWe ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.QeRoKdVQFv ++ cat /tmp/tmp.keFuWe9wWe ++ rm /tmp/tmp.QeRoKdVQFv /tmp/tmp.keFuWe9wWe ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Ams5ySpUOl ++ mktemp + local LAST_ERR=/tmp/tmp.aPKqfOBFnx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Ams5ySpUOl Percona Server for MongoDB shell version v4.4.29-28 connecting to: 
mongodb://cluster2-rs0-1.cluster2-rs0.pitr-32420.svc.cluster.local:27017,cluster2-rs0-2.cluster2-rs0.pitr-32420.svc.cluster.local:27017,cluster2-rs0-0.cluster2-rs0.pitr-32420.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("ec836d25-d55d-41e1-be60-91d578a4e9f5") } Percona Server for MongoDB server version: v8.0.19-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.aPKqfOBFnx + rm /tmp/tmp.Ams5ySpUOl /tmp/tmp.aPKqfOBFnx + return 0 + minikube_sleep + sleep_time=10 + [[ '' == 1 ]] + compare_mongo_cmd find myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-32420 -5th + local command=find + local uri=myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-32420 + local postfix=-5th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:37:56+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-32420 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-32420 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-32420 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OH2sLCjr0G +++ mktemp ++ local LAST_ERR=/tmp/tmp.mXAUgimdwp ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.OH2sLCjr0G ++ cat /tmp/tmp.mXAUgimdwp ++ rm /tmp/tmp.OH2sLCjr0G /tmp/tmp.mXAUgimdwp ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.mhSmQeOn8A ++ mktemp + local LAST_ERR=/tmp/tmp.W9Sr48hX0g + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.mhSmQeOn8A + cat /tmp/tmp.W9Sr48hX0g + rm /tmp/tmp.mhSmQeOn8A /tmp/tmp.W9Sr48hX0g + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find-5th.json /tmp/tmp.oRDgtyLOf2/find-5th + compare_mongo_cmd find myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-32420 -5th + local command=find + local 
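# [editor's note] The block above records the point-in-time target before writing more data:
# it asks mongod for the current time, trims it to "YYYY-MM-DD HH:MM:SS" (2026-03-10 22:37:53
# here), and then inserts { x: 100500 } so the -5th dataset exists only after that timestamp,
# which is why the post-restore comparisons later expect the -4th dataset. A condensed sketch
# of the same two steps ($client and $uri are stand-ins for the pod lookup and connection
# string shown in full above):
#
#   time_now=$(kubectl exec "$client" -- bash -c \
#     'printf "new Date().toISOString()\n" | mongo "$uri" --quiet' | tr T ' ' | cut -c1-19)
#   kubectl exec "$client" -- bash -c \
#     'printf "use myApp\n db.test.insert({ x: 100500 })\n" | mongo "$uri"'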
uri=myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-32420 + local postfix=-5th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:37:59+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-32420 mongodb '' '' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-32420 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-32420 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uP6OVpD8Fn +++ mktemp ++ local LAST_ERR=/tmp/tmp.wVOD32FCwn ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.uP6OVpD8Fn ++ cat /tmp/tmp.wVOD32FCwn ++ rm /tmp/tmp.uP6OVpD8Fn /tmp/tmp.wVOD32FCwn ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.bIs1jvfrEv ++ mktemp + local LAST_ERR=/tmp/tmp.GJ2e1FeGJ2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.bIs1jvfrEv + cat /tmp/tmp.GJ2e1FeGJ2 + rm /tmp/tmp.bIs1jvfrEv /tmp/tmp.GJ2e1FeGJ2 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find-5th.json /tmp/tmp.oRDgtyLOf2/find-5th + compare_mongo_cmd find myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-32420 -5th + local command=find + local uri=myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-32420 + local postfix=-5th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:38:02+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-32420 mongodb '' '' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit 
session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-32420 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-32420 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TH1T8UKfTw +++ mktemp ++ local LAST_ERR=/tmp/tmp.yojCIqatMF ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.TH1T8UKfTw ++ cat /tmp/tmp.yojCIqatMF ++ rm /tmp/tmp.TH1T8UKfTw /tmp/tmp.yojCIqatMF ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.BGP3ww9z2U ++ mktemp + local LAST_ERR=/tmp/tmp.Bh9CttlXv3 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.BGP3ww9z2U + cat /tmp/tmp.Bh9CttlXv3 + rm /tmp/tmp.BGP3ww9z2U /tmp/tmp.Bh9CttlXv3 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find-5th.json /tmp/tmp.oRDgtyLOf2/find-5th + sleep 2 + check_recovery backup-minio2-0 date '2026-03-10 22:37:53' -4th '' cluster2 + local backup_name=backup-minio2-0 + local restore_type=date + local 'restore_date=2026-03-10 22:37:53' + local cmp_postfix=-4th + local backupSource= + local cluster_name=cluster2 + local selective_collection= + local restore_name=restore-backup-minio2-0 + local restore_file=restore.yml + local cluster + cluster=cluster2-rs0 + desc 'write more data before restore by date' + set +o xtrace ----------------------------------------------------------------------------------- write more data before restore by date ----------------------------------------------------------------------------------- + sleep 60 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@cluster2-rs0.pitr-32420 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@cluster2-rs0.pitr-32420 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@cluster2-rs0.pitr-32420 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uftmTmFljR +++ mktemp ++ local LAST_ERR=/tmp/tmp.nlPyqresHV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.uftmTmFljR ++ cat /tmp/tmp.nlPyqresHV ++ rm /tmp/tmp.uftmTmFljR /tmp/tmp.nlPyqresHV ++ return 0 + local 
client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.aIKu1bEhyb ++ mktemp + local LAST_ERR=/tmp/tmp.lYmYc50aeZ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.aIKu1bEhyb Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://cluster2-rs0-1.cluster2-rs0.pitr-32420.svc.cluster.local:27017,cluster2-rs0-2.cluster2-rs0.pitr-32420.svc.cluster.local:27017,cluster2-rs0-0.cluster2-rs0.pitr-32420.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("ca626897-a797-434a-835a-eda466589f2d") } Percona Server for MongoDB server version: v8.0.19-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.lYmYc50aeZ + rm /tmp/tmp.aIKu1bEhyb /tmp/tmp.lYmYc50aeZ + return 0 + '[' -n '' ']' + desc 'waiting for chunks to be uploaded' + set +o xtrace ----------------------------------------------------------------------------------- waiting for chunks to be uploaded ----------------------------------------------------------------------------------- + sleep 120 + desc 'check restore by date' + set +o xtrace ----------------------------------------------------------------------------------- check restore by date ----------------------------------------------------------------------------------- + '[' -z '' ']' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-minio2-0/' + /usr/sbin/sed -e 's/backupName:/backupName: backup-minio2-0/' + /usr/sbin/sed -e 's/clusterName:/clusterName: cluster2/' + /usr/sbin/sed -e 's/pitrType:/type: date/' + '[' -z '2026-03-10 22:37:53' ']' + /usr/sbin/sed -e 's/date:/date: 2026-03-10 22:37:53/' + /usr/sbin/sed -e /backupSource/,+2d + kubectl_bin apply -f - + '[' -n '' ']' + yq ++ mktemp + local LAST_OUT=/tmp/tmp.3bloLrsYql ++ mktemp + local LAST_ERR=/tmp/tmp.mUZXREknHv + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.3bloLrsYql perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio2-0 created + cat /tmp/tmp.mUZXREknHv + rm /tmp/tmp.3bloLrsYql /tmp/tmp.mUZXREknHv + return 0 + wait_restore backup-minio2-0 cluster2 + local backup_name=backup-minio2-0 + local cluster_name=cluster2 + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio2-0 object to be created.OK Waiting psmdb-restore/restore-backup-minio2-0 to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency cluster2 + local cluster_name=cluster2 + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb cluster2 -o 
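# [editor's note] check_recovery above builds the restore object by sed-ing
# e2e-tests/pitr/conf/restore.yml: it stamps the restore name, backupName and clusterName,
# rewrites pitrType into "type: date", fills in the captured timestamp, and drops the unused
# backupSource block. The applied object most likely has the shape sketched below
# (reconstructed from that sed chain, not copied from the repo):
#
#   apiVersion: psmdb.percona.com/v1
#   kind: PerconaServerMongoDBRestore
#   metadata:
#     name: restore-backup-minio2-0
#   spec:
#     clusterName: cluster2
#     backupName: backup-minio2-0
#     pitr:
#       type: date
#       date: "2026-03-10 22:37:53"
#
# wait_restore then waits for the psmdb-restore to reach "ready", and wait_cluster_consistency
# polls `kubectl get psmdb cluster2 -o jsonpath={.status.state}` until the cluster reports ready.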
'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zqZKTJEESV +++ mktemp ++ local LAST_ERR=/tmp/tmp.ks9xcEQ9zi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb cluster2 -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.zqZKTJEESV ++ cat /tmp/tmp.ks9xcEQ9zi ++ rm /tmp/tmp.zqZKTJEESV /tmp/tmp.ks9xcEQ9zi ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + echo + set -o xtrace + '[' -n '' ']' + compare_mongo_cmd find myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-32420 -4th + local command=find + local uri=myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-32420 + local postfix=-4th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:41:42+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-32420 mongodb '' '' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-32420 + local driver=mongodb + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-32420 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iE6RCGAmCy +++ mktemp ++ local LAST_ERR=/tmp/tmp.iV7hReXImN ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.iE6RCGAmCy ++ cat /tmp/tmp.iV7hReXImN ++ rm /tmp/tmp.iE6RCGAmCy /tmp/tmp.iV7hReXImN ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.1tErvIxS6U ++ mktemp + local LAST_ERR=/tmp/tmp.sQhweU4FCj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.1tErvIxS6U + cat /tmp/tmp.sQhweU4FCj + rm /tmp/tmp.1tErvIxS6U /tmp/tmp.sQhweU4FCj + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.oRDgtyLOf2/find-4th + compare_mongo_cmd find myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-32420 -4th + local command=find + local uri=myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-32420 + local postfix=-4th + local suffix= + local database=myApp + local 
collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:41:45+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-32420 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-32420 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-32420 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zOFkm482aq +++ mktemp ++ local LAST_ERR=/tmp/tmp.cCidcxyaOz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.zOFkm482aq ++ cat /tmp/tmp.cCidcxyaOz ++ rm /tmp/tmp.zOFkm482aq /tmp/tmp.cCidcxyaOz ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.epuoyCkq8y ++ mktemp + local LAST_ERR=/tmp/tmp.I5jOkItjV8 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.epuoyCkq8y + cat /tmp/tmp.I5jOkItjV8 + rm /tmp/tmp.epuoyCkq8y /tmp/tmp.I5jOkItjV8 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.oRDgtyLOf2/find-4th + compare_mongo_cmd find myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-32420 -4th + local command=find + local uri=myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-32420 + local postfix=-4th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-03-10T22:41:48+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-32420 mongodb '' '' + local 'command=use myApp\n db.test.find()' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|"c":"NETWORK"|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local uri=myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-32420 
+ local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-32420 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' +++ mktemp ++ local LAST_OUT=/tmp/tmp.M2TcqU2I40 +++ mktemp ++ local LAST_ERR=/tmp/tmp.3YUbZnLzl1 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.M2TcqU2I40 ++ cat /tmp/tmp.3YUbZnLzl1 ++ rm /tmp/tmp.M2TcqU2I40 /tmp/tmp.3YUbZnLzl1 ++ return 0 + local client_container=psmdb-client-bb8b97679-jlqq8 + kubectl_bin exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.vPFoe8jWRW ++ mktemp + local LAST_ERR=/tmp/tmp.Wx29LmDDTi + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-bb8b97679-jlqq8 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-32420.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.vPFoe8jWRW + cat /tmp/tmp.Wx29LmDDTi + rm /tmp/tmp.vPFoe8jWRW /tmp/tmp.Wx29LmDDTi + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.oRDgtyLOf2/find-4th + kubectl patch psmdb some-name --type=merge --patch '{"spec": {"backup": {"pitr": {"enabled": false}}}}' perconaservermongodb.psmdb.percona.com/some-name patched + kubectl patch psmdb cluster2 --type=merge --patch '{"spec": {"backup": {"pitr": {"enabled": false}}}}' perconaservermongodb.psmdb.percona.com/cluster2 patched + sleep 20 + kubectl_bin delete psmdb-backup --all ++ mktemp + local LAST_OUT=/tmp/tmp.Js0hQ4dKxZ ++ mktemp + local LAST_ERR=/tmp/tmp.FeMYBWzQBB + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete psmdb-backup --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Js0hQ4dKxZ perconaservermongodbbackup.psmdb.percona.com "backup-minio-0" deleted from pitr-32420 namespace perconaservermongodbbackup.psmdb.percona.com "backup-minio-1" deleted from pitr-32420 namespace perconaservermongodbbackup.psmdb.percona.com "backup-minio-2" deleted from pitr-32420 namespace perconaservermongodbbackup.psmdb.percona.com "backup-minio-3" deleted from pitr-32420 namespace perconaservermongodbbackup.psmdb.percona.com "backup-minio2-0" deleted from pitr-32420 namespace + cat /tmp/tmp.FeMYBWzQBB + rm /tmp/tmp.Js0hQ4dKxZ /tmp/tmp.FeMYBWzQBB + return 0 + destroy pitr-32420 + local namespace=pitr-32420 + local ignore_logs=true + [[ 0 == 1 ]] + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false ']' + delete_backups + desc 'Delete psmdb-backup' + set +o xtrace 
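# [editor's note] Teardown begins in the trace above: PITR is switched off on both clusters
# before the backup objects are removed, presumably so PBM stops producing oplog chunks before
# the psmdb-backup objects (and their finalizers) are deleted. The commands traced above, in
# plain form; what follows below is the CRD/RBAC and cert-manager cleanup:
#
#   kubectl patch psmdb some-name --type=merge --patch '{"spec":{"backup":{"pitr":{"enabled":false}}}}'
#   kubectl patch psmdb cluster2 --type=merge --patch '{"spec":{"backup":{"pitr":{"enabled":false}}}}'
#   sleep 20
#   kubectl delete psmdb-backup --all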
----------------------------------------------------------------------------------- Delete psmdb-backup ----------------------------------------------------------------------------------- ++ wc -l ++ kubectl_bin get psmdb-backup --no-headers +++ mktemp ++ local LAST_OUT=/tmp/tmp.WIGtTR5VTI +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ac2wC1N5d2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup --no-headers ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.WIGtTR5VTI ++ cat /tmp/tmp.Ac2wC1N5d2 No resources found in pitr-32420 namespace. ++ rm /tmp/tmp.WIGtTR5VTI /tmp/tmp.Ac2wC1N5d2 ++ return 0 + '[' 0 '!=' 0 ']' + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.lSBWbhQGLO ++ mktemp + local LAST_ERR=/tmp/tmp.MU7zZ3GnOS + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.lSBWbhQGLO customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.MU7zZ3GnOS + rm /tmp/tmp.lSBWbhQGLO /tmp/tmp.MU7zZ3GnOS + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/deploy/crd.yaml ++ grep -v '\-\-\-' grep: warning: stray \ before - grep: warning: stray \ before - + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.lvSFAgQGVW ++ mktemp + local LAST_ERR=/tmp/tmp.58sLflPGXO + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.lvSFAgQGVW + cat /tmp/tmp.58sLflPGXO + rm /tmp/tmp.lvSFAgQGVW /tmp/tmp.58sLflPGXO + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource 
type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.zGVxNklNe5 ++ mktemp + local LAST_ERR=/tmp/tmp.3Xs8u7vjBE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.zGVxNklNe5 + cat /tmp/tmp.3Xs8u7vjBE + rm /tmp/tmp.zGVxNklNe5 /tmp/tmp.3Xs8u7vjBE + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.DVfUAPcTi9 ++ mktemp + local LAST_ERR=/tmp/tmp.7K0mcK3Oce + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DVfUAPcTi9 + cat /tmp/tmp.7K0mcK3Oce + rm /tmp/tmp.DVfUAPcTi9 /tmp/tmp.7K0mcK3Oce + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.GDe9sfeQbD ++ mktemp + local LAST_ERR=/tmp/tmp.pDy6DNirvL + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2282/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.GDe9sfeQbD clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.pDy6DNirvL + rm /tmp/tmp.GDe9sfeQbD /tmp/tmp.pDy6DNirvL + return 0 + destroy_cert_manager + kubectl_bin delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.136vdp0kGO ++ mktemp + local LAST_ERR=/tmp/tmp.JO9VSVzQUJ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.136vdp0kGO + cat /tmp/tmp.JO9VSVzQUJ Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io 
"challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io 
"cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not 
found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml + exit_status=1 + set -e + '[' 1 
'!=' 0 -a -n 1 ']'
+ cat /tmp/tmp.136vdp0kGO
+ cat /tmp/tmp.JO9VSVzQUJ
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": namespaces "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found
"cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 4 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.136vdp0kGO + cat /tmp/tmp.JO9VSVzQUJ Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not 
found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 8 + cat /tmp/tmp.136vdp0kGO + cat /tmp/tmp.JO9VSVzQUJ Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml": 
+ true
+ '[' -n '' ']'
+ '[' -n psmdb-operator ']'
+ kubectl_bin delete --grace-period=0 --force=true namespace pitr-32420
+ rm -rf /tmp/tmp.oRDgtyLOf2
+ kubectl_bin delete --grace-period=0 --force=true namespace psmdb-operator
++ mktemp
+ desc 'test passed'
++ mktemp
+ set +o xtrace
-----------------------------------------------------------------------------------
test passed
-----------------------------------------------------------------------------------
+ local LAST_OUT=/tmp/tmp.q9mXKCNgi4
+ local LAST_OUT=/tmp/tmp.9ocFaHLkNU
++ mktemp
++ mktemp
+ local LAST_ERR=/tmp/tmp.AhOvgbPne7
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ local LAST_ERR=/tmp/tmp.6nbrmA0xIh
+ local exit_status=0
+ local timeout=4
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace psmdb-operator
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace pitr-32420