Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/logs/pitr.log
Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1
Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1
Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1
+ main
+ create_infra pitr-14308
+ local ns=pitr-14308
+ [[ 1 == 1 ]]
+ delete_crd
+ desc 'get and delete old CRDs and RBAC'
+ set +o xtrace
-----------------------------------------------------------------------------------
get and delete old CRDs and RBAC
-----------------------------------------------------------------------------------
+ kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/deploy/crd.yaml --ignore-not-found --wait=false
++ mktemp
+ local LAST_OUT=/tmp/tmp.0LBLi0FQ5o
++ mktemp
+ local LAST_ERR=/tmp/tmp.rPwxPNah64
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/deploy/crd.yaml --ignore-not-found --wait=false
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.0LBLi0FQ5o
+ cat /tmp/tmp.rPwxPNah64
+ rm /tmp/tmp.0LBLi0FQ5o /tmp/tmp.rPwxPNah64
+ return 0
++ grep -v '\-\-\-'
++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/deploy/crd.yaml
grep: warning: stray \ before -
grep: warning: stray \ before -
+ for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-')
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide
error: the server doesn't have a resource type "perconaservermongodbbackups"
+ kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbbackups"
+ :
+ kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com
++ mktemp
+ local LAST_OUT=/tmp/tmp.GjT8Sn3G1a
++ mktemp
+ local LAST_ERR=/tmp/tmp.K4YSF8iD0t
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.GjT8Sn3G1a
+ cat /tmp/tmp.K4YSF8iD0t
+ rm /tmp/tmp.GjT8Sn3G1a /tmp/tmp.K4YSF8iD0t
+ return 0
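
The delete_crd step above can hang if leftover backup or restore objects still carry finalizers, so the harness force-clears them before waiting for the CRD to go away. A minimal standalone sketch of that pattern (resource type copied from this run; the restore and cluster CRDs are handled identically):

# list every PSMDB backup object, then strip finalizers so deletion can complete;
# with `sh -xc`, $0 and $1 receive the NAMESPACE and NAME columns from the get output
kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide \
  | grep -v NAMESPACE \
  | xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com

On a clean cluster the `error: the server doesn't have a resource type` lines above are expected; the trailing `+ :` is the no-op that swallows that failure.
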
+ for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-')
+ kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
error: the server doesn't have a resource type "perconaservermongodbrestores"
+ kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbrestores"
+ :
+ kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com
++ mktemp
+ local LAST_OUT=/tmp/tmp.bJGl3OIGLR
++ mktemp
+ local LAST_ERR=/tmp/tmp.PmeuJpkf2p
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.bJGl3OIGLR
+ cat /tmp/tmp.PmeuJpkf2p
+ rm /tmp/tmp.bJGl3OIGLR /tmp/tmp.PmeuJpkf2p
+ return 0
+ for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-')
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide
error: the server doesn't have a resource type "perconaservermongodbs"
+ kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbs"
+ :
+ kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com
++ mktemp
+ local LAST_OUT=/tmp/tmp.GAM7XNkWuv
++ mktemp
+ local LAST_ERR=/tmp/tmp.VkAwXax3ff
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.GAM7XNkWuv
+ cat /tmp/tmp.VkAwXax3ff
+ rm /tmp/tmp.GAM7XNkWuv /tmp/tmp.VkAwXax3ff
+ return 0
+ local rbac_yaml=rbac.yaml
+ '[' -n psmdb-operator ']'
+ rbac_yaml=cw-rbac.yaml
+ kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/deploy/cw-rbac.yaml --ignore-not-found
++ mktemp
+ local LAST_OUT=/tmp/tmp.zae302ZkTF
++ mktemp
+ local LAST_ERR=/tmp/tmp.wDvrFTePLX
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/deploy/cw-rbac.yaml --ignore-not-found
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.zae302ZkTF
+ cat /tmp/tmp.wDvrFTePLX
+ rm /tmp/tmp.zae302ZkTF /tmp/tmp.wDvrFTePLX
+ return 0
+ check_crd_for_deletion PR-2247-091270b4
+ local git_tag=PR-2247-091270b4
++ curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/PR-2247-091270b4/deploy/crd.yaml
++ yq eval .metadata.name
++ /usr/sbin/sed s/---//g
++ /usr/sbin/sed ':a;N;$!ba;s/\n/ /g'
+ for crd_name in $(curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml | yq eval '.metadata.name' | $sed 's/---//g' | $sed ':a;N;$!ba;s/\n/ /g')
++ kubectl_bin get crd/null -o 'jsonpath={.status.conditions[-1].type}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.BJCyyH5C1k
+++ mktemp
++ local LAST_ERR=/tmp/tmp.KsiPm2CL84
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}'
++ exit_status=1
++ set -e
++ '[' 1 '!=' 0 -a -n 1 ']'
++ cat /tmp/tmp.BJCyyH5C1k
++ cat /tmp/tmp.KsiPm2CL84
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found
++ sleep 0
++ for i in $(seq 0 2)
++ set +e
++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}'
++ exit_status=1
++ set -e
++ '[' 1 '!=' 0 -a -n 1 ']'
++ cat /tmp/tmp.BJCyyH5C1k
++ cat /tmp/tmp.KsiPm2CL84
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found
++ sleep 4
++ for i in $(seq 0 2)
++ set +e
++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}'
++ exit_status=1
++ set -e
++ '[' 1 '!=' 0 -a -n 1 ']'
++ cat /tmp/tmp.BJCyyH5C1k
++ cat /tmp/tmp.KsiPm2CL84
Error from server (NotFound):
customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 8 ++ cat /tmp/tmp.BJCyyH5C1k ++ cat /tmp/tmp.KsiPm2CL84 Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ rm /tmp/tmp.BJCyyH5C1k /tmp/tmp.KsiPm2CL84 ++ return 1 + [[ '' == \T\e\r\m\i\n\a\t\i\n\g ]] + '[' -n psmdb-operator ']' + create_namespace psmdb-operator + local namespace=psmdb-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + grep -E -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME' ++ mktemp + awk '{print$1}' + xargs kubectl delete ns + local LAST_OUT=/tmp/tmp.Dd4XxLotCx ++ mktemp + '[' -n '' ']' + desc 'cleaned up old namespaces psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace psmdb-operator --ignore-not-found ++ mktemp + local LAST_ERR=/tmp/tmp.1VlZUY3RmS + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get ns + local LAST_OUT=/tmp/tmp.3NgBIlFyoK ++ mktemp + local LAST_ERR=/tmp/tmp.4ZDf3anPG1 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete namespace psmdb-operator --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Dd4XxLotCx + cat /tmp/tmp.1VlZUY3RmS + rm /tmp/tmp.Dd4XxLotCx /tmp/tmp.1VlZUY3RmS + return 0 error: resource(s) were provided, but no name was specified + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat 
/tmp/tmp.3NgBIlFyoK namespace "psmdb-operator" deleted + cat /tmp/tmp.4ZDf3anPG1 + rm /tmp/tmp.3NgBIlFyoK /tmp/tmp.4ZDf3anPG1 + return 0 + kubectl_bin wait --for=delete namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.bC7byefspG ++ mktemp + local LAST_ERR=/tmp/tmp.v8n3VL0whw + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.bC7byefspG + cat /tmp/tmp.v8n3VL0whw + rm /tmp/tmp.bC7byefspG /tmp/tmp.v8n3VL0whw + return 0 + desc 'create namespace psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.myPh3nVGvW ++ mktemp + local LAST_ERR=/tmp/tmp.XpOjTQ2h0T + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.myPh3nVGvW namespace/psmdb-operator created + cat /tmp/tmp.XpOjTQ2h0T + rm /tmp/tmp.myPh3nVGvW /tmp/tmp.XpOjTQ2h0T + return 0 + set_kube_ctx psmdb-operator + local namespace=psmdb-operator ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.XZ3odRsCjS +++ mktemp ++ local LAST_ERR=/tmp/tmp.RcYfidOsvY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.XZ3odRsCjS ++ cat /tmp/tmp.RcYfidOsvY ++ rm /tmp/tmp.XZ3odRsCjS /tmp/tmp.RcYfidOsvY ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2247-091270b4-5-cluster1 --namespace=psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.ktejPm4uYo ++ mktemp + local LAST_ERR=/tmp/tmp.3Xxa45W1Xm + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2247-091270b4-5-cluster1 --namespace=psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ktejPm4uYo Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-2247-091270b4-5-cluster1" modified. 
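
The create_namespace/set_kube_ctx sequence above reduces to two kubectl calls: create the namespace, then pin the current kubeconfig context to it so later commands don't need -n. A minimal sketch of the same idea (the GKE context name in this run is specific to the CI cluster):

kubectl create namespace psmdb-operator
kubectl config set-context "$(kubectl config current-context)" --namespace=psmdb-operator
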
+ cat /tmp/tmp.3Xxa45W1Xm + rm /tmp/tmp.ktejPm4uYo /tmp/tmp.3Xxa45W1Xm + return 0 + deploy_operator + desc 'start PSMDB operator: perconalab/percona-server-mongodb-operator:PR-2247-091270b4' + set +o xtrace ----------------------------------------------------------------------------------- start PSMDB operator: perconalab/percona-server-mongodb-operator:PR-2247-091270b4 ----------------------------------------------------------------------------------- + local cr_file + '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/conf/crd.yaml ']' + cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/deploy/crd.yaml + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.Q3wcBlhZim ++ mktemp + local LAST_ERR=/tmp/tmp.thRIwMATWF + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Q3wcBlhZim customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied + cat /tmp/tmp.thRIwMATWF + rm /tmp/tmp.Q3wcBlhZim /tmp/tmp.thRIwMATWF + return 0 + '[' -n psmdb-operator ']' + apply_rbac cw-rbac + local operator_namespace=psmdb-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: psmdb-operator^' + kubectl_bin apply -n psmdb-operator -f - ++ mktemp + local LAST_OUT=/tmp/tmp.LYZpOFcccK ++ mktemp + local LAST_ERR=/tmp/tmp.g0NPjnw2mK + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LYZpOFcccK clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created + cat /tmp/tmp.g0NPjnw2mK + rm /tmp/tmp.LYZpOFcccK /tmp/tmp.g0NPjnw2mK + return 0 + yq eval ' (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-2247-091270b4") | ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") | ((.. 
| select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/deploy/cw-operator.yaml + kubectl_bin apply -n psmdb-operator -f - ++ mktemp + local LAST_OUT=/tmp/tmp.A1Wpa3ljBW ++ mktemp + local LAST_ERR=/tmp/tmp.a3XDHSWbgY + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.A1Wpa3ljBW deployment.apps/percona-server-mongodb-operator created + cat /tmp/tmp.a3XDHSWbgY + rm /tmp/tmp.A1Wpa3ljBW /tmp/tmp.a3XDHSWbgY + return 0 + sleep 20 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.g3xCim6sCJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.PFi87UWUx3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.g3xCim6sCJ ++ cat /tmp/tmp.PFi87UWUx3 ++ rm /tmp/tmp.g3xCim6sCJ /tmp/tmp.PFi87UWUx3 ++ return 0 + wait_operator_pod percona-server-mongodb-operator-64bc9b5fc9-82lz6 + local pod=percona-server-mongodb-operator-64bc9b5fc9-82lz6 + set +o xtrace waiting for pod/percona-server-mongodb-operator-64bc9b5fc9-82lz6 to be ready.OK + echo 'Print operator info from log' Print operator info from log + grep 'Manager starting up' ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.68LFLd6xCQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.BmYX6Lq9hj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.68LFLd6xCQ ++ cat /tmp/tmp.BmYX6Lq9hj ++ rm /tmp/tmp.68LFLd6xCQ /tmp/tmp.BmYX6Lq9hj ++ return 0 + kubectl_bin logs -n psmdb-operator percona-server-mongodb-operator-64bc9b5fc9-82lz6 ++ mktemp + local LAST_OUT=/tmp/tmp.3geLYdjZAh ++ mktemp + local LAST_ERR=/tmp/tmp.c1tOT24TF9 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl logs -n psmdb-operator percona-server-mongodb-operator-64bc9b5fc9-82lz6 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.3geLYdjZAh + cat /tmp/tmp.c1tOT24TF9 + rm /tmp/tmp.3geLYdjZAh /tmp/tmp.c1tOT24TF9 + return 0 2026-02-19T14:48:58.295Z INFO setup Manager starting up {"gitCommit": "091270b48773244359d74f74a10340fc1c0cff51", "gitBranch": "PR-2247-091270b4", "buildTime": "", "goVersion": "go1.25.7", "os": "linux", "arch": "amd64"} + create_namespace pitr-14308 + local namespace=pitr-14308 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 
30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + '[' -n '' ']' + desc 'cleaned up old namespaces pitr-14308' + set +o xtrace ++ mktemp ----------------------------------------------------------------------------------- cleaned up old namespaces pitr-14308 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pitr-14308 --ignore-not-found + grep -E -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + local LAST_OUT=/tmp/tmp.KdESTD0yqj + xargs kubectl delete ns ++ mktemp + local LAST_ERR=/tmp/tmp.md7VqacuYv + local exit_status=0 + local timeout=4 ++ mktemp ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get ns + local LAST_OUT=/tmp/tmp.t29N6y8sad ++ mktemp + local LAST_ERR=/tmp/tmp.5patfDBJ2j + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete namespace pitr-14308 --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.KdESTD0yqj + cat /tmp/tmp.md7VqacuYv + rm /tmp/tmp.KdESTD0yqj /tmp/tmp.md7VqacuYv + return 0 error: resource(s) were provided, but no name was specified + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.t29N6y8sad + cat /tmp/tmp.5patfDBJ2j + rm /tmp/tmp.t29N6y8sad /tmp/tmp.5patfDBJ2j + return 0 + kubectl_bin wait --for=delete namespace pitr-14308 ++ mktemp + local LAST_OUT=/tmp/tmp.GKhJgODlwr ++ mktemp + local LAST_ERR=/tmp/tmp.HFrLnyqFPr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete namespace pitr-14308 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.GKhJgODlwr + cat /tmp/tmp.HFrLnyqFPr + rm /tmp/tmp.GKhJgODlwr /tmp/tmp.HFrLnyqFPr + return 0 + desc 'create namespace pitr-14308' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pitr-14308 ----------------------------------------------------------------------------------- + kubectl_bin create namespace pitr-14308 ++ mktemp + local LAST_OUT=/tmp/tmp.YHF0URDOlh ++ mktemp + local LAST_ERR=/tmp/tmp.F4UYxtr3vp + local exit_status=0 + local timeout=4 ++ seq 
0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace pitr-14308 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.YHF0URDOlh namespace/pitr-14308 created + cat /tmp/tmp.F4UYxtr3vp + rm /tmp/tmp.YHF0URDOlh /tmp/tmp.F4UYxtr3vp + return 0 + set_kube_ctx pitr-14308 + local namespace=pitr-14308 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.kbQyswaGKJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.0Gu2TAUtAV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.kbQyswaGKJ ++ cat /tmp/tmp.0Gu2TAUtAV ++ rm /tmp/tmp.kbQyswaGKJ /tmp/tmp.0Gu2TAUtAV ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2247-091270b4-5-cluster1 --namespace=pitr-14308 ++ mktemp + local LAST_OUT=/tmp/tmp.Gn25IUInUv ++ mktemp + local LAST_ERR=/tmp/tmp.YBY5LKrCF0 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2247-091270b4-5-cluster1 --namespace=pitr-14308 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Gn25IUInUv Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-2247-091270b4-5-cluster1" modified. + cat /tmp/tmp.YBY5LKrCF0 + rm /tmp/tmp.Gn25IUInUv /tmp/tmp.YBY5LKrCF0 + return 0 + deploy_minio + local cert_secret= + local service_name=minio-service + desc 'install MinIO: minio-service' + set +o xtrace ----------------------------------------------------------------------------------- install MinIO: minio-service ----------------------------------------------------------------------------------- + helm uninstall minio-service + : + helm repo remove minio "minio" has been removed from your repositories + helm repo add minio https://charts.min.io/ "minio" has been added to your repositories + local endpoint=http://minio-service:9000 + minio_args=('--version' '5.4.0' '--set' 'replicas=1' '--set' 'mode=standalone' '--set' 'resources.requests.memory=256Mi' '--set' 'rootUser=rootuser' '--set' 'rootPassword=rootpass123' '--set' 'users[0].accessKey=some-access-key' '--set' 'users[0].secretKey=some-secret-key' '--set' 'users[0].policy=consoleAdmin' '--set' 'service.type=ClusterIP' '--set' 'configPathmc=/tmp/' '--set' 'securityContext.enabled=false' '--set' 'persistence.size=2G' '--set' 'fullnameOverride=minio-service' '--set' 'serviceAccount.create=true' '--set' 'serviceAccount.name=minio-service-sa') + local minio_args + [[ -n '' ]] + retry 10 60 helm install minio-service --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/ --set securityContext.enabled=false --set persistence.size=2G --set fullnameOverride=minio-service --set serviceAccount.create=true --set serviceAccount.name=minio-service-sa minio/minio + local max=10 + local delay=60 + shift 2 + local n=1 + helm install minio-service --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set 
configPathmc=/tmp/ --set securityContext.enabled=false --set persistence.size=2G --set fullnameOverride=minio-service --set serviceAccount.create=true --set serviceAccount.name=minio-service-sa minio/minio NAME: minio-service LAST DEPLOYED: Thu Feb 19 14:49:36 2026 NAMESPACE: pitr-14308 STATUS: deployed REVISION: 1 TEST SUITE: None NOTES: MinIO can be accessed via port 9000 on the following DNS name from within your cluster: minio-service.pitr-14308.cluster.local To access MinIO from localhost, run the below commands: 1. export POD_NAME=$(kubectl get pods --namespace pitr-14308 -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}") 2. kubectl port-forward $POD_NAME 9000 --namespace pitr-14308 Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/ You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client: 1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart 2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace pitr-14308 minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace pitr-14308 minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000 3. mc ls minio-service-local ++ kubectl_bin get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3qYcGUTqnd +++ mktemp ++ local LAST_ERR=/tmp/tmp.EHBdTJcWVJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3qYcGUTqnd ++ cat /tmp/tmp.EHBdTJcWVJ ++ rm /tmp/tmp.3qYcGUTqnd /tmp/tmp.EHBdTJcWVJ ++ return 0 + local MINIO_POD=minio-service-6d5f646cdc-t7n64 + wait_pod minio-service-6d5f646cdc-t7n64 + local pod=minio-service-6d5f646cdc-t7n64 + set +o xtrace waiting for pod/minio-service-6d5f646cdc-t7n64 to be ready.OK + '[' -n psmdb-operator ']' + kubectl_bin create svc -n psmdb-operator externalname minio-service --external-name=minio-service.pitr-14308.svc.cluster.local --tcp=9000 service/minio-service created + create_minio_bucket operator-testing http://minio-service:9000 + local bucket=operator-testing + local endpoint=http://minio-service:9000 + kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --no-verify-ssl --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing' ++ mktemp + local LAST_OUT=/tmp/tmp.ldBk1CM3vt ++ mktemp + local LAST_ERR=/tmp/tmp.h5ybJj7LZD + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --no-verify-ssl --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ldBk1CM3vt pod "aws-cli" deleted from pitr-14308 namespace + cat /tmp/tmp.h5ybJj7LZD All commands and output from this session will be recorded in container logs, including credentials and sensitive information passed through the command prompt. 
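
deploy_minio above is a stock helm install of MinIO plus a throwaway aws-cli pod that pre-creates the test bucket over the S3 API. Condensed, with most of the --set flags elided (the endpoint, image, and credentials are the test values from this run):

helm repo add minio https://charts.min.io/
helm install minio-service minio/minio --version 5.4.0 \
  --set mode=standalone --set service.type=ClusterIP --set fullnameOverride=minio-service
# one-shot pod: create the bucket the backups will target, then delete itself
kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c \
  'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key \
   /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing'
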
If you don't see a command prompt, try pressing enter. + rm /tmp/tmp.ldBk1CM3vt /tmp/tmp.h5ybJj7LZD + return 0 + desc 'create secrets and start client' + set +o xtrace ----------------------------------------------------------------------------------- create secrets and start client ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/conf/client.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/conf/minio-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.KpfaoU6ccY ++ mktemp + local LAST_ERR=/tmp/tmp.F6VehrpP0w + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/conf/client.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/conf/minio-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.KpfaoU6ccY secret/some-users created deployment.apps/psmdb-client created secret/minio-secret created + cat /tmp/tmp.F6VehrpP0w + rm /tmp/tmp.KpfaoU6ccY /tmp/tmp.F6VehrpP0w + return 0 + cluster=some-name-rs0 + desc 'create first PSMDB cluster some-name-rs0' + set +o xtrace ----------------------------------------------------------------------------------- create first PSMDB cluster some-name-rs0 ----------------------------------------------------------------------------------- + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/conf/some-name-rs0.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/conf/some-name-rs0.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/conf/some-name-rs0.yml + yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod8.0"' ++ mktemp + yq eval '(.spec | select(has("pmm"))).pmm.image = "percona/pmm-client:2.44.1-1"' + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-2247-091270b4"' + local LAST_OUT=/tmp/tmp.cDz32JR0FT ++ mktemp + yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' + local LAST_ERR=/tmp/tmp.VTQTyGAOjb + local exit_status=0 + local timeout=4 + /usr/sbin/sed -e s/NAME_SPACE/pitr-14308/g ++ seq 0 2 + yq eval '.spec.upgradeOptions.apply="Never"' + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.cDz32JR0FT perconaservermongodb.psmdb.percona.com/some-name created + cat /tmp/tmp.VTQTyGAOjb + rm /tmp/tmp.cDz32JR0FT /tmp/tmp.VTQTyGAOjb + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to 
be ready........OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.......OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PLdwbOmuci +++ mktemp ++ local LAST_ERR=/tmp/tmp.7qYsvQD9rf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.PLdwbOmuci ++ cat /tmp/tmp.7qYsvQD9rf ++ rm /tmp/tmp.PLdwbOmuci /tmp/tmp.7qYsvQD9rf ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.......OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ahAEumTvKH +++ mktemp ++ local LAST_ERR=/tmp/tmp.xtUKiKqUB4 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ahAEumTvKH ++ cat /tmp/tmp.xtUKiKqUB4 ++ rm /tmp/tmp.ahAEumTvKH /tmp/tmp.xtUKiKqUB4 ++ return 0 + [[ '' == \t\r\u\e ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ecydPMSvQ2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.A1A5NctfQj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ecydPMSvQ2 ++ cat /tmp/tmp.A1A5NctfQj ++ rm /tmp/tmp.ecydPMSvQ2 /tmp/tmp.A1A5NctfQj ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness............... + desc 'check if statefulset created with expected config' + set +o xtrace ----------------------------------------------------------------------------------- check if statefulset created with expected config ----------------------------------------------------------------------------------- + compare_kubectl statefulset/some-name-rs0 + local resource=statefulset/some-name-rs0 + local postfix= + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/statefulset_some-name-rs0.yml + local new_result=/tmp/tmp.TVPhAYVAUk/statefulset_some-name-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/statefulset_some-name-rs0-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-rs0 + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. 
| select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("pitr-14308", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.H3IAt5g26l ++ mktemp + local LAST_ERR=/tmp/tmp.C9HTd6HejA + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/some-name-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.H3IAt5g26l + cat /tmp/tmp.C9HTd6HejA + rm /tmp/tmp.H3IAt5g26l /tmp/tmp.C9HTd6HejA + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.TVPhAYVAUk/statefulset_some-name-rs0.yml + version_gt 1.22 ++ echo '1.32 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.TVPhAYVAUk/statefulset_some-name-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.TVPhAYVAUk/statefulset_some-name-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/statefulset_some-name-rs0.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/statefulset_some-name-rs0.yml /tmp/tmp.TVPhAYVAUk/statefulset_some-name-rs0.yml + log 'compare_kubectl: statefulset/some-name-rs0 OK' + set +o xtrace [2026-02-19T14:52:14+0000] compare_kubectl: statefulset/some-name-rs0 OK + write_initial_data + desc 'create user myApp' + set +o xtrace ----------------------------------------------------------------------------------- create user myApp ----------------------------------------------------------------------------------- + run_mongo 'db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' userAdmin:userAdmin123456@some-name-rs0.pitr-14308 + local 'command=db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' + local uri=userAdmin:userAdmin123456@some-name-rs0.pitr-14308 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local 
+ write_initial_data
+ desc 'create user myApp'
+ set +o xtrace
-----------------------------------------------------------------------------------
create user myApp
-----------------------------------------------------------------------------------
+ run_mongo 'db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' userAdmin:userAdmin123456@some-name-rs0.pitr-14308
+ local 'command=db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})'
+ local uri=userAdmin:userAdmin123456@some-name-rs0.pitr-14308
+ local driver=mongodb+srv
+ local suffix=.svc.cluster.local
+ local mongo_flag=
+ local replica_set=rs0
+ [[ userAdmin:userAdmin123456@some-name-rs0.pitr-14308 == *cfg* ]]
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.8JvYvDkssV
+++ mktemp
++ local LAST_ERR=/tmp/tmp.MwIVmRVWoD
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.8JvYvDkssV
++ cat /tmp/tmp.MwIVmRVWoD
++ rm /tmp/tmp.8JvYvDkssV /tmp/tmp.MwIVmRVWoD
++ return 0
+ local client_container=psmdb-client-86cb5d8484-r5b58
+ kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
++ mktemp
+ local LAST_OUT=/tmp/tmp.GG4CPWWRXK
++ mktemp
+ local LAST_ERR=/tmp/tmp.I5RH28j6zo
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.GG4CPWWRXK
Percona Server for MongoDB shell version v4.4.29-28
connecting to: mongodb://some-name-rs0-1.some-name-rs0.pitr-14308.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.pitr-14308.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.pitr-14308.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false
Implicit session: session { "id" : UUID("fa8fccc7-2aeb-4df7-a417-0ac1053af227") }
Percona Server for MongoDB server version: v8.0.19-7
WARNING: shell and server versions do not match
Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] }
bye
+ cat /tmp/tmp.I5RH28j6zo
+ rm /tmp/tmp.GG4CPWWRXK /tmp/tmp.I5RH28j6zo
+ return 0
+ sleep 2
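
run_mongo above is the harness's generic query runner: it resolves the psmdb-client pod by label, then pipes the command into the mongo shell over an SRV connection string. Issued by hand, the same call is just (the pod name varies per rollout):

client=$(kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}')
kubectl exec "$client" -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo "mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false&replicaSet=rs0"'
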
+ write_document
+ local cmp_postfix=
+ local cluster_name=some-name-rs0
+ desc 'write initial data, read from all'
+ set +o xtrace
-----------------------------------------------------------------------------------
write initial data, read from all
-----------------------------------------------------------------------------------
+ run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@some-name-rs0.pitr-14308
+ local 'command=use myApp\n db.test.insert({ x: 100500 })'
+ local uri=myApp:myPass@some-name-rs0.pitr-14308
+ local driver=mongodb+srv
+ local suffix=.svc.cluster.local
+ local mongo_flag=
+ local replica_set=rs0
+ [[ myApp:myPass@some-name-rs0.pitr-14308 == *cfg* ]]
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.5XHjsi3s5X
+++ mktemp
++ local LAST_ERR=/tmp/tmp.xCdiJ0W3DQ
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.5XHjsi3s5X
++ cat /tmp/tmp.xCdiJ0W3DQ
++ rm /tmp/tmp.5XHjsi3s5X /tmp/tmp.xCdiJ0W3DQ
++ return 0
+ local client_container=psmdb-client-86cb5d8484-r5b58
+ kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
++ mktemp
+ local LAST_OUT=/tmp/tmp.L3DoSxy900
++ mktemp
+ local LAST_ERR=/tmp/tmp.bv6VLd4h3C
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.L3DoSxy900
Percona Server for MongoDB shell version v4.4.29-28
connecting to: mongodb://some-name-rs0-0.some-name-rs0.pitr-14308.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.pitr-14308.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-14308.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false
Implicit session: session { "id" : UUID("96f63796-8ac3-49e3-be34-0cc467b51d29") }
Percona Server for MongoDB server version: v8.0.19-7
WARNING: shell and server versions do not match
switched to db myApp
WriteResult({ "nInserted" : 1 })
bye
+ cat /tmp/tmp.bv6VLd4h3C
+ rm /tmp/tmp.L3DoSxy900 /tmp/tmp.bv6VLd4h3C
+ return 0
+ minikube_sleep
+ sleep_time=10
+ [[ '' == 1 ]]
+ compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 ''
+ local command=find
+ local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308
+ local postfix=
+ local suffix=
+ local database=myApp
+ local collection=test
+ local sort=
+ local tls=false
+ local replicaset=
+ local 'full_command=db.test.find()'
+ [[ -n '' ]]
+ log 'running db.test.find() in myApp'
+ set +o xtrace
[2026-02-19T14:52:21+0000] running db.test.find() in myApp
+ [[ false == \t\r\u\e ]]
+ mongo_command=run_mongo
+ run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 mongodb '' ''
+ grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service'
+ local 'command=use myApp\n db.test.find()'
+ local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308
+ local driver=mongodb
+ local suffix=.svc.cluster.local
+ local mongo_flag=
+ local replica_set=rs0
+ [[ myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 == *cfg* ]]
+ /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.c7HCWv4QRo
+++ mktemp
++ local LAST_ERR=/tmp/tmp.BcxR6cGWN4
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.c7HCWv4QRo
++ cat /tmp/tmp.BcxR6cGWN4
++ rm /tmp/tmp.c7HCWv4QRo /tmp/tmp.BcxR6cGWN4
++ return 0
+ local client_container=psmdb-client-86cb5d8484-r5b58
+ kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
++ mktemp
+ local LAST_OUT=/tmp/tmp.zHJNP11y6x
++ mktemp
+
local LAST_ERR=/tmp/tmp.tRfwBD1sJm + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.zHJNP11y6x + cat /tmp/tmp.tRfwBD1sJm + rm /tmp/tmp.zHJNP11y6x /tmp/tmp.tRfwBD1sJm + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find.json /tmp/tmp.TVPhAYVAUk/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 '' + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T14:52:23+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3MOZCtuqoc +++ mktemp ++ local LAST_ERR=/tmp/tmp.OwYZslWkEd ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3MOZCtuqoc ++ cat /tmp/tmp.OwYZslWkEd ++ rm /tmp/tmp.3MOZCtuqoc /tmp/tmp.OwYZslWkEd ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Dk4XbNnmLt ++ mktemp + local LAST_ERR=/tmp/tmp.XvgffI455L + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Dk4XbNnmLt + cat /tmp/tmp.XvgffI455L + rm /tmp/tmp.Dk4XbNnmLt /tmp/tmp.XvgffI455L + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find.json /tmp/tmp.TVPhAYVAUk/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 '' + local command=find + local 
uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308
+ local postfix=
+ local suffix=
+ local database=myApp
+ local collection=test
+ local sort=
+ local tls=false
+ local replicaset=
+ local 'full_command=db.test.find()'
+ [[ -n '' ]]
+ log 'running db.test.find() in myApp'
+ set +o xtrace
[2026-02-19T14:52:26+0000] running db.test.find() in myApp
+ [[ false == \t\r\u\e ]]
+ mongo_command=run_mongo
+ run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 mongodb '' ''
+ local 'command=use myApp\n db.test.find()'
+ local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308
+ local driver=mongodb
+ local suffix=.svc.cluster.local
+ local mongo_flag=
+ local replica_set=rs0
+ [[ myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 == *cfg* ]]
+ grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service'
+ /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.XDaSF5b1rU
+++ mktemp
++ local LAST_ERR=/tmp/tmp.pBIRmUzkQ2
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.XDaSF5b1rU
++ cat /tmp/tmp.pBIRmUzkQ2
++ rm /tmp/tmp.XDaSF5b1rU /tmp/tmp.pBIRmUzkQ2
++ return 0
+ local client_container=psmdb-client-86cb5d8484-r5b58
+ kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
++ mktemp
+ local LAST_OUT=/tmp/tmp.OFsRujCKoZ
++ mktemp
+ local LAST_ERR=/tmp/tmp.2U3nzrXlg2
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.OFsRujCKoZ
+ cat /tmp/tmp.2U3nzrXlg2
+ rm /tmp/tmp.OFsRujCKoZ /tmp/tmp.2U3nzrXlg2
+ return 0
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find.json /tmp/tmp.TVPhAYVAUk/find
+ wait_backup_agent some-name-rs0-0
+ local agent_pod=some-name-rs0-0
+ set +o xtrace
waiting for pbm-agent to be ready in some-name-rs0-0...2026-02-19T14:51:39.000+0000 I listening for the commands
+ wait_backup_agent some-name-rs0-1
+ local agent_pod=some-name-rs0-1
+ set +o xtrace
waiting for pbm-agent to be ready in some-name-rs0-1...2026-02-19T14:52:11.000+0000 I listening for the commands
+ wait_backup_agent some-name-rs0-2
+ local agent_pod=some-name-rs0-2
+ set +o xtrace
waiting for pbm-agent to be ready in some-name-rs0-2...2026-02-19T14:52:12.000+0000 I listening for the commands
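
wait_backup_agent above blocks until the pbm-agent sidecar in each replica-set pod reports that it is accepting commands, which is the signal that backups can start. One way to reproduce the check by hand (the container name backup-agent is an assumption about the sidecar, not shown in this log):

until kubectl logs some-name-rs0-0 -c backup-agent --tail=5 | grep -q 'listening for the commands'; do
  sleep 1
done
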
+ backup_name_minio=backup-minio
+ run_mongo 'use myApp\n db.test2.insert({ x: 100501 })' myApp:myPass@some-name-rs0.pitr-14308
+ local 'command=use myApp\n db.test2.insert({ x: 100501 })'
+ local uri=myApp:myPass@some-name-rs0.pitr-14308
+ local driver=mongodb+srv
+ local suffix=.svc.cluster.local
+ local mongo_flag=
+ local replica_set=rs0
+ [[ myApp:myPass@some-name-rs0.pitr-14308 == *cfg* ]]
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.QD69LxmSIO
+++ mktemp
++ local LAST_ERR=/tmp/tmp.2fhnPgkfEu
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.QD69LxmSIO
++ cat /tmp/tmp.2fhnPgkfEu
++ rm /tmp/tmp.QD69LxmSIO /tmp/tmp.2fhnPgkfEu
++ return 0
+ local client_container=psmdb-client-86cb5d8484-r5b58
+ kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test2.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
++ mktemp
+ local LAST_OUT=/tmp/tmp.XuioHh5trA
++ mktemp
+ local LAST_ERR=/tmp/tmp.LcVzWl7oJf
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test2.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.XuioHh5trA
Percona Server for MongoDB shell version v4.4.29-28
connecting to: mongodb://some-name-rs0-2.some-name-rs0.pitr-14308.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-14308.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.pitr-14308.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false
Implicit session: session { "id" : UUID("abbbd826-7418-41e2-90b9-9f03d7f800bb") }
Percona Server for MongoDB server version: v8.0.19-7
WARNING: shell and server versions do not match
switched to db myApp
WriteResult({ "nInserted" : 1 })
bye
+ cat /tmp/tmp.LcVzWl7oJf
+ rm /tmp/tmp.XuioHh5trA /tmp/tmp.LcVzWl7oJf
+ return 0
+ run_backup backup-minio 0
+ local name=backup-minio
+ local idx=0
+ desc 'run backup backup-minio-0'
+ set +o xtrace
-----------------------------------------------------------------------------------
run backup backup-minio-0
-----------------------------------------------------------------------------------
+ cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/conf/backup-minio.yml
+ /usr/sbin/sed -e 's/name:/name: backup-minio-0/'
+ kubectl_bin apply -f -
++ mktemp
+ local LAST_OUT=/tmp/tmp.Bj2asq2BwO
++ mktemp
+ local LAST_ERR=/tmp/tmp.14ZKlAhqfN
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.Bj2asq2BwO
perconaservermongodbbackup.psmdb.percona.com/backup-minio-0 created
+ cat /tmp/tmp.14ZKlAhqfN
+ rm /tmp/tmp.Bj2asq2BwO /tmp/tmp.14ZKlAhqfN
+ return 0
+ wait_backup backup-minio-0
+ local backup_name=backup-minio-0
+ local target_state=ready
+ set +o xtrace
waiting for backup-minio-0 to reach ready state.......OK
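
run_backup above just stamps a name into the backup manifest with sed and applies it; wait_backup then polls the object's state. An equivalent hand-written request (the apiVersion and spec fields follow the operator's documented backup CR, and storageName must match a storage defined in the cluster CR; neither detail is printed in this log):

cat <<EOF | kubectl apply -f -
apiVersion: psmdb.percona.com/v1
kind: PerconaServerMongoDBBackup
metadata:
  name: backup-minio-0
spec:
  clusterName: some-name
  storageName: minio
EOF
# one way to watch it by hand instead of the harness's polling loop
kubectl wait --for=jsonpath='{.status.state}'=ready psmdb-backup/backup-minio-0 --timeout=300s
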
'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@some-name-rs0.pitr-14308 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@some-name-rs0.pitr-14308 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.pitr-14308 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ptpWf3GBks +++ mktemp ++ local LAST_ERR=/tmp/tmp.eSMGwOs48z ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ptpWf3GBks ++ cat /tmp/tmp.eSMGwOs48z ++ rm /tmp/tmp.ptpWf3GBks /tmp/tmp.eSMGwOs48z ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.04CRfsoL0d ++ mktemp + local LAST_ERR=/tmp/tmp.W6Auv8FKli + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.04CRfsoL0d Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.pitr-14308.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-14308.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.pitr-14308.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("8943f6b1-d196-4f73-9709-fcf1bacb73b4") } Percona Server for MongoDB server version: v8.0.19-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.W6Auv8FKli + rm /tmp/tmp.04CRfsoL0d /tmp/tmp.W6Auv8FKli + return 0 + minikube_sleep + sleep_time=10 + [[ '' == 1 ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T14:52:51+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the 
timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2hrZLEUVyc +++ mktemp ++ local LAST_ERR=/tmp/tmp.y7bM5zfK1v ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.2hrZLEUVyc ++ cat /tmp/tmp.y7bM5zfK1v ++ rm /tmp/tmp.2hrZLEUVyc /tmp/tmp.y7bM5zfK1v ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.c5YMYNnAVE ++ mktemp + local LAST_ERR=/tmp/tmp.4YO5EVbEo4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.c5YMYNnAVE + cat /tmp/tmp.4YO5EVbEo4 + rm /tmp/tmp.c5YMYNnAVE /tmp/tmp.4YO5EVbEo4 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find-2nd.json /tmp/tmp.TVPhAYVAUk/find-2nd + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T14:52:53+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fgOpKQnFgE +++ mktemp ++ local LAST_ERR=/tmp/tmp.De1uDdVbkW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.fgOpKQnFgE ++ cat /tmp/tmp.De1uDdVbkW ++ rm /tmp/tmp.fgOpKQnFgE /tmp/tmp.De1uDdVbkW ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use 
myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.E0OHflpoJr ++ mktemp + local LAST_ERR=/tmp/tmp.8KeRjtl9n6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.E0OHflpoJr + cat /tmp/tmp.8KeRjtl9n6 + rm /tmp/tmp.E0OHflpoJr /tmp/tmp.8KeRjtl9n6 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find-2nd.json /tmp/tmp.TVPhAYVAUk/find-2nd + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T14:52:56+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Mfg6kFyNRr +++ mktemp ++ local LAST_ERR=/tmp/tmp.UgVuuBBTOi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Mfg6kFyNRr ++ cat /tmp/tmp.UgVuuBBTOi ++ rm /tmp/tmp.Mfg6kFyNRr /tmp/tmp.UgVuuBBTOi ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.U0lQfDfNHF ++ mktemp + local LAST_ERR=/tmp/tmp.hEEbwXCRch + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.U0lQfDfNHF + cat /tmp/tmp.hEEbwXCRch + rm /tmp/tmp.U0lQfDfNHF /tmp/tmp.hEEbwXCRch + return 0 + [[ 0 -eq 0 ]] + diff -u 
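# Every compare_mongo_cmd above follows the same pattern: run the query through
# the psmdb-client pod, strip shell/driver noise, normalize volatile fields
# (ObjectIds, pod ordinals), and diff against a golden file. A sketch of that
# pipeline; the pod name, grep filter and sed expressions are the ones shown in
# the trace, while the /tmp/find-2nd output path is illustrative (the run uses
# a mktemp directory):
kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c \
  'printf "use myApp\n db.test.find()\n" | mongo "mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false&replicaSet=rs0"' \
  | grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Started a new thread for the timer service' \
  | sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' \
  > /tmp/find-2nd
diff -u e2e-tests/pitr/compare/find-2nd.json /tmp/find-2nd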
/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find-2nd.json /tmp/tmp.TVPhAYVAUk/find-2nd + sleep 2 ++ run_mongo 'new Date().toISOString()' myApp:myPass@some-name-rs0.pitr-14308 mongodb '' --quiet ++ local 'command=new Date().toISOString()' ++ local uri=myApp:myPass@some-name-rs0.pitr-14308 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ local replica_set=rs0 ++ [[ myApp:myPass@some-name-rs0.pitr-14308 == *cfg* ]] ++ tr T ' ' ++ cut -c1-19 ++ grep -E -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.oF5CI7KkGp ++++ mktemp +++ local LAST_ERR=/tmp/tmp.G8pGeiCAJO +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.oF5CI7KkGp +++ cat /tmp/tmp.G8pGeiCAJO +++ rm /tmp/tmp.oF5CI7KkGp /tmp/tmp.G8pGeiCAJO +++ return 0 ++ local client_container=psmdb-client-86cb5d8484-r5b58 ++ kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''new Date().toISOString()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JQe3kLglHl +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZpdchMQ2v6 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''new Date().toISOString()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JQe3kLglHl ++ cat /tmp/tmp.ZpdchMQ2v6 ++ rm /tmp/tmp.JQe3kLglHl /tmp/tmp.ZpdchMQ2v6 ++ return 0 + time_now='2026-02-19 14:53:03' + check_recovery backup-minio-0 date '2026-02-19 14:53:03' -2nd '' some-name test2 + local backup_name=backup-minio-0 + local restore_type=date + local 'restore_date=2026-02-19 14:53:03' + local cmp_postfix=-2nd + local backupSource= + local cluster_name=some-name + local selective_collection=test2 + local restore_name=restore-backup-minio-0 + local restore_file=restore.yml + local cluster + cluster=some-name-rs0 + desc 'write more data before restore by date' + set +o xtrace ----------------------------------------------------------------------------------- write more data before restore by date ----------------------------------------------------------------------------------- + sleep 60 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.pitr-14308 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.pitr-14308 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.pitr-14308 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w65v5Opt13 +++ mktemp ++ local LAST_ERR=/tmp/tmp.un03RVdCZM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 
'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.w65v5Opt13 ++ cat /tmp/tmp.un03RVdCZM ++ rm /tmp/tmp.w65v5Opt13 /tmp/tmp.un03RVdCZM ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.avET0uvIr6 ++ mktemp + local LAST_ERR=/tmp/tmp.bPhs9J6icS + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.avET0uvIr6 Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.pitr-14308.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-14308.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.pitr-14308.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("47c36ea4-198c-4d82-8916-195d78429b6c") } Percona Server for MongoDB server version: v8.0.19-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.bPhs9J6icS + rm /tmp/tmp.avET0uvIr6 /tmp/tmp.bPhs9J6icS + return 0 + '[' -n test2 ']' + run_mongo 'use myApp\n db.test2.drop()' myApp:myPass@some-name-rs0.pitr-14308 + local 'command=use myApp\n db.test2.drop()' + local uri=myApp:myPass@some-name-rs0.pitr-14308 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.pitr-14308 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xBuQM7aXLS +++ mktemp ++ local LAST_ERR=/tmp/tmp.xWcbajgPdM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xBuQM7aXLS ++ cat /tmp/tmp.xWcbajgPdM ++ rm /tmp/tmp.xBuQM7aXLS /tmp/tmp.xWcbajgPdM ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test2.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.z9AS5liy6B ++ mktemp + local LAST_ERR=/tmp/tmp.aP3T1TvqQq + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test2.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.z9AS5liy6B Percona Server for MongoDB shell version v4.4.29-28 connecting to: 
mongodb://some-name-rs0-0.some-name-rs0.pitr-14308.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.pitr-14308.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-14308.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("f83e7ca1-7af2-463f-ae0c-0badb39f48f9") } Percona Server for MongoDB server version: v8.0.19-7 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.aP3T1TvqQq + rm /tmp/tmp.z9AS5liy6B /tmp/tmp.aP3T1TvqQq + return 0 + desc 'waiting for chunks to be uploaded' + set +o xtrace ----------------------------------------------------------------------------------- waiting for chunks to be uploaded ----------------------------------------------------------------------------------- + sleep 120 + desc 'check restore by date' + set +o xtrace ----------------------------------------------------------------------------------- check restore by date ----------------------------------------------------------------------------------- + '[' -z '' ']' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-minio-0/' + /usr/sbin/sed -e 's/backupName:/backupName: backup-minio-0/' + /usr/sbin/sed -e 's/clusterName:/clusterName: some-name/' + /usr/sbin/sed -e 's/pitrType:/type: date/' + '[' -z '2026-02-19 14:53:03' ']' + /usr/sbin/sed -e 's/date:/date: 2026-02-19 14:53:03/' + /usr/sbin/sed -e /backupSource/,+2d + '[' -n test2 ']' + yq eval '.spec.selective = {"namespaces": ["myApp.test"], "withUsersAndRoles": true}' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.IEevrBmKgE ++ mktemp + local LAST_ERR=/tmp/tmp.Mh9eQUHonf + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.IEevrBmKgE perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-0 created + cat /tmp/tmp.Mh9eQUHonf + rm /tmp/tmp.IEevrBmKgE /tmp/tmp.Mh9eQUHonf + return 0 + wait_restore backup-minio-0 some-name + local backup_name=backup-minio-0 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-0 object to be created.OK Waiting psmdb-restore/restore-backup-minio-0 to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pObfNYvG5m +++ mktemp ++ local LAST_ERR=/tmp/tmp.1eWlmyODAP ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.pObfNYvG5m ++ cat /tmp/tmp.1eWlmyODAP ++ rm /tmp/tmp.pObfNYvG5m /tmp/tmp.1eWlmyODAP ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + echo + set -o xtrace + '[' -n test2 ']' ++ collection_exists test2 ./e2e-tests/pitr/run: line 108: collection_exists: command not found + [[ '' == \t\r\u\e ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 -2nd + local command=find + local 
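# The check_recovery step above rewrites e2e-tests/pitr/conf/restore.yml with
# sed/yq and applies it. Reconstructed from those substitutions, the applied
# object is roughly the following; the apiVersion and the nesting of the PITR
# fields follow the operator's PerconaServerMongoDBRestore schema and are
# assumptions here, while name, backupName, clusterName, date and the
# .spec.selective block are exactly what the trace sets:
kubectl apply -f - <<'EOF'
apiVersion: psmdb.percona.com/v1
kind: PerconaServerMongoDBRestore
metadata:
  name: restore-backup-minio-0
spec:
  clusterName: some-name
  backupName: backup-minio-0
  pitr:
    type: date
    date: "2026-02-19 14:53:03"
  selective:
    namespaces:
      - myApp.test
    withUsersAndRoles: true
EOF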
uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T14:56:45+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9qB6LD46rX +++ mktemp ++ local LAST_ERR=/tmp/tmp.Jzqmh4a9Eq ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.9qB6LD46rX ++ cat /tmp/tmp.Jzqmh4a9Eq ++ rm /tmp/tmp.9qB6LD46rX /tmp/tmp.Jzqmh4a9Eq ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.QbQ6HnCq47 ++ mktemp + local LAST_ERR=/tmp/tmp.KBD6IW5iQS + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.QbQ6HnCq47 + cat /tmp/tmp.KBD6IW5iQS + rm /tmp/tmp.QbQ6HnCq47 /tmp/tmp.KBD6IW5iQS + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find-2nd.json /tmp/tmp.TVPhAYVAUk/find-2nd + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T14:56:47+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ 
myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qahpbfe7iC +++ mktemp ++ local LAST_ERR=/tmp/tmp.isD6kmBxPc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.qahpbfe7iC ++ cat /tmp/tmp.isD6kmBxPc ++ rm /tmp/tmp.qahpbfe7iC /tmp/tmp.isD6kmBxPc ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.SLXXqn9QFA ++ mktemp + local LAST_ERR=/tmp/tmp.fzHlESl01w + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.SLXXqn9QFA + cat /tmp/tmp.fzHlESl01w + rm /tmp/tmp.SLXXqn9QFA /tmp/tmp.fzHlESl01w + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find-2nd.json /tmp/tmp.TVPhAYVAUk/find-2nd + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T14:56:49+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.j8J0DGzOHB +++ mktemp ++ local LAST_ERR=/tmp/tmp.qRyb9Slx3I ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ 
exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.j8J0DGzOHB ++ cat /tmp/tmp.qRyb9Slx3I ++ rm /tmp/tmp.j8J0DGzOHB /tmp/tmp.qRyb9Slx3I ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.56UWB6aZzl ++ mktemp + local LAST_ERR=/tmp/tmp.3K8f5iAJJl + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.56UWB6aZzl + cat /tmp/tmp.3K8f5iAJJl + rm /tmp/tmp.56UWB6aZzl /tmp/tmp.3K8f5iAJJl + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find-2nd.json /tmp/tmp.TVPhAYVAUk/find-2nd + run_backup backup-minio 1 + local name=backup-minio + local idx=1 + desc 'run backup backup-minio-1' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio-1 ----------------------------------------------------------------------------------- + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/conf/backup-minio.yml + /usr/sbin/sed -e 's/name:/name: backup-minio-1/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.EEz5XdR6H7 ++ mktemp + local LAST_ERR=/tmp/tmp.umuDqRGYH8 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.EEz5XdR6H7 perconaservermongodbbackup.psmdb.percona.com/backup-minio-1 created + cat /tmp/tmp.umuDqRGYH8 + rm /tmp/tmp.EEz5XdR6H7 /tmp/tmp.umuDqRGYH8 + return 0 + wait_backup backup-minio-1 + local backup_name=backup-minio-1 + local target_state=ready + set +o xtrace waiting for backup-minio-1 to reach ready state.......OK + compare_latest_restorable_time some-name-rs0 backup-minio-1 + local cluster=some-name-rs0 + local backup_name=backup-minio-1 + local latest_restorable_time + local backup_time ++ get_latest_restorable_time some-name-rs0 ++ local cluster=some-name-rs0 ++ local first_timestamp ++ local second_timestamp ++ local retry=0 ++ [[ '' != '' ]] +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.QioxcE35tH ++++ mktemp +++ local LAST_ERR=/tmp/tmp.RPwWNQ5eOF +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.QioxcE35tH +++ cat /tmp/tmp.RPwWNQ5eOF +++ rm /tmp/tmp.QioxcE35tH /tmp/tmp.RPwWNQ5eOF +++ return 0 ++ first_timestamp=1771512981 ++ sleep 5 ++ [[ 1771512981 != '' ]] ++ [[ 1771512981 != \n\u\l\l ]] +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.lJvP0Nxugg ++++ mktemp +++ local LAST_ERR=/tmp/tmp.pmc84LBj2K +++ local exit_status=0 +++ local timeout=4 
++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.lJvP0Nxugg +++ cat /tmp/tmp.pmc84LBj2K +++ rm /tmp/tmp.lJvP0Nxugg /tmp/tmp.pmc84LBj2K +++ return 0 ++ second_timestamp=1771512981 ++ let retry+=1 ++ [[ 1 -gt 30 ]] ++ [[ 1771512981 != '' ]] ++ [[ 1771512981 != \n\u\l\l ]] ++ [[ 1771512981 == 1771512981 ]] ++ /usr/sbin/date -u -d @1771512981 +%Y-%m-%dT%H:%M:%SZ + latest_restorable_time=2026-02-19T14:56:21Z ++ get_latest_restorable_time_from_backup_object backup-minio-1 ++ local backup_name=backup-minio-1 ++ local latestRestorableTime ++ local retry=0 ++ [[ '' != '' ]] ++ sleep 5 +++ kubectl_bin get psmdb-backup backup-minio-1 -o 'jsonpath={.status.latestRestorableTime}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.vwxgNi1OC0 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.Ba0JOXkfzk +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get psmdb-backup backup-minio-1 -o 'jsonpath={.status.latestRestorableTime}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.vwxgNi1OC0 +++ cat /tmp/tmp.Ba0JOXkfzk +++ rm /tmp/tmp.vwxgNi1OC0 /tmp/tmp.Ba0JOXkfzk +++ return 0 ++ latestRestorableTime=2026-02-19T14:56:21Z ++ let retry+=1 ++ [[ 1 -gt 30 ]] ++ [[ 2026-02-19T14:56:21Z != '' ]] ++ [[ 2026-02-19T14:56:21Z != \n\u\l\l ]] ++ echo 2026-02-19T14:56:21Z + backup_time=2026-02-19T14:56:21Z + [[ 2026-02-19T14:56:21Z != \2\0\2\6\-\0\2\-\1\9\T\1\4\:\5\6\:\2\1\Z ]] + check_recovery backup-minio-1 latest '' -3rd '' some-name + local backup_name=backup-minio-1 + local restore_type=latest + local restore_date= + local cmp_postfix=-3rd + local backupSource= + local cluster_name=some-name + local selective_collection= + local restore_name=restore-backup-minio-1 + local restore_file=restore.yml + local cluster + cluster=some-name-rs0 + desc 'write more data before restore by latest' + set +o xtrace ----------------------------------------------------------------------------------- write more data before restore by latest ----------------------------------------------------------------------------------- + sleep 60 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.pitr-14308 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.pitr-14308 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.pitr-14308 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lkhSTzvCDv +++ mktemp ++ local LAST_ERR=/tmp/tmp.2uUGC4vCPv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.lkhSTzvCDv ++ cat /tmp/tmp.2uUGC4vCPv ++ rm /tmp/tmp.lkhSTzvCDv /tmp/tmp.2uUGC4vCPv ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.0xV1rMEgvF ++ mktemp + local LAST_ERR=/tmp/tmp.cCj8yFeJyJ + local 
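# compare_latest_restorable_time above cross-checks PBM's view of the PITR
# window against what the operator records on the backup object. The essence of
# that check, using the same jq filter and jsonpath as the trace (variable
# names are illustrative; the real helper also re-reads pbm status until two
# consecutive readings agree before trusting the value):
pbm_end=$(kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json \
  | jq '.backups.pitrChunks.pitrChunks | last | .range.end')
pbm_end_iso=$(date -u -d "@${pbm_end}" +%Y-%m-%dT%H:%M:%SZ)
cr_time=$(kubectl get psmdb-backup backup-minio-1 -o jsonpath='{.status.latestRestorableTime}')
[[ "${pbm_end_iso}" == "${cr_time}" ]] || echo "latest restorable time mismatch: ${pbm_end_iso} vs ${cr_time}"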
exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.0xV1rMEgvF Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.pitr-14308.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-14308.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.pitr-14308.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("d885e560-f1eb-45d3-97a6-b831c2482d21") } Percona Server for MongoDB server version: v8.0.19-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.cCj8yFeJyJ + rm /tmp/tmp.0xV1rMEgvF /tmp/tmp.cCj8yFeJyJ + return 0 + '[' -n '' ']' + desc 'waiting for chunks to be uploaded' + set +o xtrace ----------------------------------------------------------------------------------- waiting for chunks to be uploaded ----------------------------------------------------------------------------------- + sleep 120 + desc 'check restore by latest' + set +o xtrace ----------------------------------------------------------------------------------- check restore by latest ----------------------------------------------------------------------------------- + '[' -z '' ']' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-minio-1/' + /usr/sbin/sed -e 's/backupName:/backupName: backup-minio-1/' + /usr/sbin/sed -e 's/clusterName:/clusterName: some-name/' + /usr/sbin/sed -e 's/pitrType:/type: latest/' + '[' -z '' ']' + /usr/sbin/sed -e /date:/d + /usr/sbin/sed -e /backupSource/,+2d + '[' -n '' ']' + yq + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.EVXArNRDvs ++ mktemp + local LAST_ERR=/tmp/tmp.teu6mbaHSU + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.EVXArNRDvs perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-1 created + cat /tmp/tmp.teu6mbaHSU + rm /tmp/tmp.EVXArNRDvs /tmp/tmp.teu6mbaHSU + return 0 + wait_restore backup-minio-1 some-name + local backup_name=backup-minio-1 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-1 object to be created.OK Waiting psmdb-restore/restore-backup-minio-1 to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QSlcwdEqcP +++ mktemp ++ local LAST_ERR=/tmp/tmp.j2NJ6MtA8m ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.QSlcwdEqcP ++ cat /tmp/tmp.j2NJ6MtA8m ++ rm /tmp/tmp.QSlcwdEqcP 
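# The "check restore by latest" object applied just above uses the same
# restore.yml template, but with the type set to latest, the date line deleted,
# and no selective block; roughly (pitr nesting assumed as in the date-based
# sketch earlier):
kubectl apply -f - <<'EOF'
apiVersion: psmdb.percona.com/v1
kind: PerconaServerMongoDBRestore
metadata:
  name: restore-backup-minio-1
spec:
  clusterName: some-name
  backupName: backup-minio-1
  pitr:
    type: latest
EOF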
/tmp/tmp.j2NJ6MtA8m ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + echo + set -o xtrace + '[' -n '' ']' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 -3rd + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 + local postfix=-3rd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T15:00:50+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' +++ mktemp ++ local LAST_OUT=/tmp/tmp.j0cjkH8qVj +++ mktemp ++ local LAST_ERR=/tmp/tmp.epcHVqj2wt ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.j0cjkH8qVj ++ cat /tmp/tmp.epcHVqj2wt ++ rm /tmp/tmp.j0cjkH8qVj /tmp/tmp.epcHVqj2wt ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.DWIQtdoe4c ++ mktemp + local LAST_ERR=/tmp/tmp.G2LioJPNcH + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DWIQtdoe4c + cat /tmp/tmp.G2LioJPNcH + rm /tmp/tmp.DWIQtdoe4c /tmp/tmp.G2LioJPNcH + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find-3rd.json /tmp/tmp.TVPhAYVAUk/find-3rd + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 -3rd + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 + local postfix=-3rd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T15:00:52+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 mongodb '' '' + local 
'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0Q4RuMJqCB +++ mktemp ++ local LAST_ERR=/tmp/tmp.NC52TQm6G1 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.0Q4RuMJqCB ++ cat /tmp/tmp.NC52TQm6G1 ++ rm /tmp/tmp.0Q4RuMJqCB /tmp/tmp.NC52TQm6G1 ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.CCkkgYuuSc ++ mktemp + local LAST_ERR=/tmp/tmp.Svn5JzjpSB + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.CCkkgYuuSc + cat /tmp/tmp.Svn5JzjpSB + rm /tmp/tmp.CCkkgYuuSc /tmp/tmp.Svn5JzjpSB + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find-3rd.json /tmp/tmp.TVPhAYVAUk/find-3rd + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 -3rd + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 + local postfix=-3rd + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T15:00:54+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1kOeopFkdb +++ mktemp ++ local 
LAST_ERR=/tmp/tmp.WfGnBPWFOc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.1kOeopFkdb ++ cat /tmp/tmp.WfGnBPWFOc ++ rm /tmp/tmp.1kOeopFkdb /tmp/tmp.WfGnBPWFOc ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.VCF9B2VJa2 ++ mktemp + local LAST_ERR=/tmp/tmp.4ZgUuh5VxN + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.VCF9B2VJa2 + cat /tmp/tmp.4ZgUuh5VxN + rm /tmp/tmp.VCF9B2VJa2 /tmp/tmp.4ZgUuh5VxN + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find-3rd.json /tmp/tmp.TVPhAYVAUk/find-3rd + desc 'restore pitr using backupSource' + set +o xtrace ----------------------------------------------------------------------------------- restore pitr using backupSource ----------------------------------------------------------------------------------- + reset_collection + desc 'reset data' + set +o xtrace ----------------------------------------------------------------------------------- reset data ----------------------------------------------------------------------------------- + run_mongo 'use myApp\n db.test.remove({})' myApp:myPass@some-name-rs0.pitr-14308 + local 'command=use myApp\n db.test.remove({})' + local uri=myApp:myPass@some-name-rs0.pitr-14308 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.pitr-14308 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.S45RPqrwOK +++ mktemp ++ local LAST_ERR=/tmp/tmp.OSrbiOXRFy ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.S45RPqrwOK ++ cat /tmp/tmp.OSrbiOXRFy ++ rm /tmp/tmp.S45RPqrwOK /tmp/tmp.OSrbiOXRFy ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.remove({})\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.aL5fUug7Ue ++ mktemp + local LAST_ERR=/tmp/tmp.aF6bnhuxZN + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.remove({})\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.aL5fUug7Ue Percona Server for MongoDB shell version 
v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.pitr-14308.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.pitr-14308.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-14308.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("902462cb-d3fb-4dff-82a4-043bf2934bb4") } Percona Server for MongoDB server version: v8.0.19-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nRemoved" : 3 }) bye + cat /tmp/tmp.aF6bnhuxZN + rm /tmp/tmp.aL5fUug7Ue /tmp/tmp.aF6bnhuxZN + return 0 + sleep 2 + write_document + local cmp_postfix= + local cluster_name=some-name-rs0 + desc 'write initial data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write initial data, read from all ----------------------------------------------------------------------------------- + run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@some-name-rs0.pitr-14308 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@some-name-rs0.pitr-14308 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.pitr-14308 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ObpUxLOayC +++ mktemp ++ local LAST_ERR=/tmp/tmp.lUiAiNiGFI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ObpUxLOayC ++ cat /tmp/tmp.lUiAiNiGFI ++ rm /tmp/tmp.ObpUxLOayC /tmp/tmp.lUiAiNiGFI ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.JllnIqyGcN ++ mktemp + local LAST_ERR=/tmp/tmp.pi8mI7QbkD + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.JllnIqyGcN Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.pitr-14308.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.pitr-14308.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-14308.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("79ad9059-e295-4918-8c0a-c873c1973be2") } Percona Server for MongoDB server version: v8.0.19-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.pi8mI7QbkD + rm /tmp/tmp.JllnIqyGcN /tmp/tmp.pi8mI7QbkD + return 0 + minikube_sleep + sleep_time=10 + [[ '' == 1 ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 '' + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 + 
local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T15:01:04+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.88g2pVapbJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.XCl24c24mI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.88g2pVapbJ ++ cat /tmp/tmp.XCl24c24mI ++ rm /tmp/tmp.88g2pVapbJ /tmp/tmp.XCl24c24mI ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.u1I1oiaLDp ++ mktemp + local LAST_ERR=/tmp/tmp.sAD588SejA + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.u1I1oiaLDp + cat /tmp/tmp.sAD588SejA + rm /tmp/tmp.u1I1oiaLDp /tmp/tmp.sAD588SejA + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find.json /tmp/tmp.TVPhAYVAUk/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 '' + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T15:01:07+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 mongodb '' '' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local 'command=use myApp\n db.test.find()' + local 
uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VSJ9UiHY7e +++ mktemp ++ local LAST_ERR=/tmp/tmp.KXRgSqA7q3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.VSJ9UiHY7e ++ cat /tmp/tmp.KXRgSqA7q3 ++ rm /tmp/tmp.VSJ9UiHY7e /tmp/tmp.KXRgSqA7q3 ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.fhX0AXPLl4 ++ mktemp + local LAST_ERR=/tmp/tmp.BSt5l3AJ2r + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.fhX0AXPLl4 + cat /tmp/tmp.BSt5l3AJ2r + rm /tmp/tmp.fhX0AXPLl4 /tmp/tmp.BSt5l3AJ2r + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find.json /tmp/tmp.TVPhAYVAUk/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 '' + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T15:01:09+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eYj1yfydmr +++ mktemp ++ local LAST_ERR=/tmp/tmp.1i9AJHtJzk ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.eYj1yfydmr ++ cat 
/tmp/tmp.1i9AJHtJzk ++ rm /tmp/tmp.eYj1yfydmr /tmp/tmp.1i9AJHtJzk ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.3wUYjZ2ko1 ++ mktemp + local LAST_ERR=/tmp/tmp.1A3x7gATDG + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.3wUYjZ2ko1 + cat /tmp/tmp.1A3x7gATDG + rm /tmp/tmp.3wUYjZ2ko1 /tmp/tmp.1A3x7gATDG + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find.json /tmp/tmp.TVPhAYVAUk/find + run_backup backup-minio 2 + local name=backup-minio + local idx=2 + desc 'run backup backup-minio-2' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio-2 ----------------------------------------------------------------------------------- + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/conf/backup-minio.yml + /usr/sbin/sed -e 's/name:/name: backup-minio-2/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.ISelMl4DIV ++ mktemp + local LAST_ERR=/tmp/tmp.w4HvDxBIBM + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ISelMl4DIV perconaservermongodbbackup.psmdb.percona.com/backup-minio-2 created + cat /tmp/tmp.w4HvDxBIBM + rm /tmp/tmp.ISelMl4DIV /tmp/tmp.w4HvDxBIBM + return 0 + wait_backup backup-minio-2 + local backup_name=backup-minio-2 + local target_state=ready + set +o xtrace waiting for backup-minio-2 to reach ready state.......OK + compare_latest_restorable_time some-name-rs0 backup-minio-2 + local cluster=some-name-rs0 + local backup_name=backup-minio-2 + local latest_restorable_time + local backup_time ++ get_latest_restorable_time some-name-rs0 ++ local cluster=some-name-rs0 ++ local first_timestamp ++ local second_timestamp ++ local retry=0 ++ [[ '' != '' ]] +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.2v80IlZHxR ++++ mktemp +++ local LAST_ERR=/tmp/tmp.cgUhADfGKu +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.2v80IlZHxR +++ cat /tmp/tmp.cgUhADfGKu +++ rm /tmp/tmp.2v80IlZHxR /tmp/tmp.cgUhADfGKu +++ return 0 ++ first_timestamp=1771513228 ++ sleep 5 ++ [[ 1771513228 != '' ]] ++ [[ 1771513228 != \n\u\l\l ]] +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.u5J10qHWEj ++++ mktemp +++ local LAST_ERR=/tmp/tmp.kYLKv4imnU +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm 
status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.u5J10qHWEj +++ cat /tmp/tmp.kYLKv4imnU +++ rm /tmp/tmp.u5J10qHWEj /tmp/tmp.kYLKv4imnU +++ return 0 ++ second_timestamp=1771513228 ++ let retry+=1 ++ [[ 1 -gt 30 ]] ++ [[ 1771513228 != '' ]] ++ [[ 1771513228 != \n\u\l\l ]] ++ [[ 1771513228 == 1771513228 ]] ++ /usr/sbin/date -u -d @1771513228 +%Y-%m-%dT%H:%M:%SZ + latest_restorable_time=2026-02-19T15:00:28Z ++ get_latest_restorable_time_from_backup_object backup-minio-2 ++ local backup_name=backup-minio-2 ++ local latestRestorableTime ++ local retry=0 ++ [[ '' != '' ]] ++ sleep 5 +++ kubectl_bin get psmdb-backup backup-minio-2 -o 'jsonpath={.status.latestRestorableTime}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.AKw2RIivTM ++++ mktemp +++ local LAST_ERR=/tmp/tmp.Ejf0pt00h9 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get psmdb-backup backup-minio-2 -o 'jsonpath={.status.latestRestorableTime}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.AKw2RIivTM +++ cat /tmp/tmp.Ejf0pt00h9 +++ rm /tmp/tmp.AKw2RIivTM /tmp/tmp.Ejf0pt00h9 +++ return 0 ++ latestRestorableTime=2026-02-19T15:00:28Z ++ let retry+=1 ++ [[ 1 -gt 30 ]] ++ [[ 2026-02-19T15:00:28Z != '' ]] ++ [[ 2026-02-19T15:00:28Z != \n\u\l\l ]] ++ echo 2026-02-19T15:00:28Z + backup_time=2026-02-19T15:00:28Z + [[ 2026-02-19T15:00:28Z != \2\0\2\6\-\0\2\-\1\9\T\1\5\:\0\0\:\2\8\Z ]] ++ run_mongo 'new Date().toISOString()' myApp:myPass@some-name-rs0.pitr-14308 mongodb '' --quiet ++ grep -E -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match' ++ local 'command=new Date().toISOString()' ++ local uri=myApp:myPass@some-name-rs0.pitr-14308 ++ cut -c1-19 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ local replica_set=rs0 ++ [[ myApp:myPass@some-name-rs0.pitr-14308 == *cfg* ]] ++ tr T ' ' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.zwt4zIIbLI ++++ mktemp +++ local LAST_ERR=/tmp/tmp.jwkVT261Kj +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.zwt4zIIbLI +++ cat /tmp/tmp.jwkVT261Kj +++ rm /tmp/tmp.zwt4zIIbLI /tmp/tmp.jwkVT261Kj +++ return 0 ++ local client_container=psmdb-client-86cb5d8484-r5b58 ++ kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''new Date().toISOString()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X7oqAUut1M +++ mktemp ++ local LAST_ERR=/tmp/tmp.z0rBWiXkCA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''new Date().toISOString()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.X7oqAUut1M ++ cat /tmp/tmp.z0rBWiXkCA ++ rm /tmp/tmp.X7oqAUut1M /tmp/tmp.z0rBWiXkCA ++ return 0 + time_now='2026-02-19 15:01:43' + check_recovery backup-minio-2 date '2026-02-19 15:01:43' 
'' backupSource some-name + local backup_name=backup-minio-2 + local restore_type=date + local 'restore_date=2026-02-19 15:01:43' + local cmp_postfix= + local backupSource=backupSource + local cluster_name=some-name + local selective_collection= + local restore_name=restore-backup-minio-2 + local restore_file=restore.yml + local cluster + cluster=some-name-rs0 + desc 'write more data before restore by date' + set +o xtrace ----------------------------------------------------------------------------------- write more data before restore by date ----------------------------------------------------------------------------------- + sleep 60 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.pitr-14308 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.pitr-14308 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.pitr-14308 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sTsTaAPWjJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.TxprysihT0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.sTsTaAPWjJ ++ cat /tmp/tmp.TxprysihT0 ++ rm /tmp/tmp.sTsTaAPWjJ /tmp/tmp.TxprysihT0 ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.8D94KbAJAV ++ mktemp + local LAST_ERR=/tmp/tmp.BrGSLfM4Zz + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.8D94KbAJAV Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.pitr-14308.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-14308.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.pitr-14308.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("987a1520-bf80-4dbb-9325-4444736110c9") } Percona Server for MongoDB server version: v8.0.19-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.BrGSLfM4Zz + rm /tmp/tmp.8D94KbAJAV /tmp/tmp.BrGSLfM4Zz + return 0 + '[' -n '' ']' + desc 'waiting for chunks to be uploaded' + set +o xtrace ----------------------------------------------------------------------------------- waiting for chunks to be uploaded ----------------------------------------------------------------------------------- + sleep 120 + desc 'check restore by date' + set +o xtrace ----------------------------------------------------------------------------------- check restore by date ----------------------------------------------------------------------------------- + '[' -z backupSource ']' ++ 
get_backup_dest backup-minio-2 ++ local backup_name=backup-minio-2 ++ kubectl_bin get psmdb-backup backup-minio-2 -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' +++ mktemp ++ sed 's|s3://||' ++ sed 's|azure://||' ++ sed 's|gs://||' ++ local LAST_OUT=/tmp/tmp.WVGWpD7CZc +++ mktemp ++ local LAST_ERR=/tmp/tmp.Wb1YmF8SSe ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup backup-minio-2 -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.WVGWpD7CZc ++ cat /tmp/tmp.Wb1YmF8SSe ++ rm /tmp/tmp.WVGWpD7CZc /tmp/tmp.Wb1YmF8SSe ++ return 0 + backup_dest=operator-testing/pitr-prefix-1/2026-02-19T15:01:14Z + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-minio-2/' + /usr/sbin/sed -e /backupName/d + /usr/sbin/sed -e 's/clusterName:/clusterName: some-name/' + '[' -z '2026-02-19 15:01:43' ']' + /usr/sbin/sed -e 's/date:/date: 2026-02-19 15:01:43/' + '[' -n '' ']' + yq + kubectl_bin apply -f - + /usr/sbin/sed -e 's/pitrType:/type: date/' + /usr/sbin/sed -e 's|BACKUP-NAME|operator-testing/pitr-prefix-1/2026-02-19T15:01:14Z|' ++ mktemp + local LAST_OUT=/tmp/tmp.PyjXUzorSa ++ mktemp + local LAST_ERR=/tmp/tmp.zb9xelazEb + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.PyjXUzorSa perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-2 created + cat /tmp/tmp.zb9xelazEb + rm /tmp/tmp.PyjXUzorSa /tmp/tmp.zb9xelazEb + return 0 + wait_restore backup-minio-2 some-name + local backup_name=backup-minio-2 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-2 object to be created.OK Waiting psmdb-restore/restore-backup-minio-2 to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wG6FnpBKRb +++ mktemp ++ local LAST_ERR=/tmp/tmp.7L4zmPOXgd ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.wG6FnpBKRb ++ cat /tmp/tmp.7L4zmPOXgd ++ rm /tmp/tmp.wG6FnpBKRb /tmp/tmp.7L4zmPOXgd ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + echo + set -o xtrace + '[' -n '' ']' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 '' + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T15:05:09+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 mongodb '' '' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; 
s/-[0-9]+.svc/-xxx.svc/' + local 'command=use myApp\n db.test.find()' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aMYANCBAie +++ mktemp ++ local LAST_ERR=/tmp/tmp.oiKRNVru3A ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.aMYANCBAie ++ cat /tmp/tmp.oiKRNVru3A ++ rm /tmp/tmp.aMYANCBAie /tmp/tmp.oiKRNVru3A ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.SYuETH1dzT ++ mktemp + local LAST_ERR=/tmp/tmp.x4kbc5tOv2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.SYuETH1dzT + cat /tmp/tmp.x4kbc5tOv2 + rm /tmp/tmp.SYuETH1dzT /tmp/tmp.x4kbc5tOv2 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find.json /tmp/tmp.TVPhAYVAUk/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 '' + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T15:05:11+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' +++ mktemp ++ local LAST_OUT=/tmp/tmp.R4IMlGQgxc +++ mktemp ++ local LAST_ERR=/tmp/tmp.2M3npSQz7o ++ local exit_status=0 ++ 
local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.R4IMlGQgxc ++ cat /tmp/tmp.2M3npSQz7o ++ rm /tmp/tmp.R4IMlGQgxc /tmp/tmp.2M3npSQz7o ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.0rkMA4XVKn ++ mktemp + local LAST_ERR=/tmp/tmp.YW0Ndl7ZxL + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.0rkMA4XVKn + cat /tmp/tmp.YW0Ndl7ZxL + rm /tmp/tmp.0rkMA4XVKn /tmp/tmp.YW0Ndl7ZxL + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find.json /tmp/tmp.TVPhAYVAUk/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 '' + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T15:05:14+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oxwdg4VnT9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.XADT0guRx6 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.oxwdg4VnT9 ++ cat /tmp/tmp.XADT0guRx6 ++ rm /tmp/tmp.oxwdg4VnT9 /tmp/tmp.XADT0guRx6 ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.WgHPJjocO2 ++ mktemp + local LAST_ERR=/tmp/tmp.ppx1WfoUrf + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set 
+e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.WgHPJjocO2 + cat /tmp/tmp.ppx1WfoUrf + rm /tmp/tmp.WgHPJjocO2 /tmp/tmp.ppx1WfoUrf + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find.json /tmp/tmp.TVPhAYVAUk/find + run_backup backup-minio 3 + local name=backup-minio + local idx=3 + desc 'run backup backup-minio-3' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio-3 ----------------------------------------------------------------------------------- + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/conf/backup-minio.yml + kubectl_bin apply -f - + /usr/sbin/sed -e 's/name:/name: backup-minio-3/' ++ mktemp + local LAST_OUT=/tmp/tmp.VN4F5VRVoH ++ mktemp + local LAST_ERR=/tmp/tmp.9MX01N4KYw + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.VN4F5VRVoH perconaservermongodbbackup.psmdb.percona.com/backup-minio-3 created + cat /tmp/tmp.9MX01N4KYw + rm /tmp/tmp.VN4F5VRVoH /tmp/tmp.9MX01N4KYw + return 0 + wait_backup backup-minio-3 + local backup_name=backup-minio-3 + local target_state=ready + set +o xtrace waiting for backup-minio-3 to reach ready state.......OK + compare_latest_restorable_time some-name-rs0 backup-minio-3 + local cluster=some-name-rs0 + local backup_name=backup-minio-3 + local latest_restorable_time + local backup_time ++ get_latest_restorable_time some-name-rs0 ++ local cluster=some-name-rs0 ++ local first_timestamp ++ local second_timestamp ++ local retry=0 ++ [[ '' != '' ]] +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.NXGYLZuVaM ++++ mktemp +++ local LAST_ERR=/tmp/tmp.Ot0dl4rI3t +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.NXGYLZuVaM +++ cat /tmp/tmp.Ot0dl4rI3t +++ rm /tmp/tmp.NXGYLZuVaM /tmp/tmp.Ot0dl4rI3t +++ return 0 ++ first_timestamp=1771513489 ++ sleep 5 ++ [[ 1771513489 != '' ]] ++ [[ 1771513489 != \n\u\l\l ]] +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.GyguvCCWhq ++++ mktemp +++ local LAST_ERR=/tmp/tmp.qJYg6566Sw +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.GyguvCCWhq +++ cat /tmp/tmp.qJYg6566Sw +++ rm /tmp/tmp.GyguvCCWhq /tmp/tmp.qJYg6566Sw +++ return 0 ++ second_timestamp=1771513489 ++ let retry+=1 ++ [[ 1 -gt 30 ]] ++ [[ 1771513489 != '' ]] ++ [[ 1771513489 != \n\u\l\l ]] ++ [[ 1771513489 == 1771513489 ]] ++ /usr/sbin/date -u -d @1771513489 +%Y-%m-%dT%H:%M:%SZ + latest_restorable_time=2026-02-19T15:04:49Z ++ get_latest_restorable_time_from_backup_object backup-minio-3 ++ local 
backup_name=backup-minio-3 ++ local latestRestorableTime ++ local retry=0 ++ [[ '' != '' ]] ++ sleep 5 +++ kubectl_bin get psmdb-backup backup-minio-3 -o 'jsonpath={.status.latestRestorableTime}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.ClXNe1W3HT ++++ mktemp +++ local LAST_ERR=/tmp/tmp.OKH17O9YWs +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get psmdb-backup backup-minio-3 -o 'jsonpath={.status.latestRestorableTime}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.ClXNe1W3HT +++ cat /tmp/tmp.OKH17O9YWs +++ rm /tmp/tmp.ClXNe1W3HT /tmp/tmp.OKH17O9YWs +++ return 0 ++ latestRestorableTime=2026-02-19T15:04:49Z ++ let retry+=1 ++ [[ 1 -gt 30 ]] ++ [[ 2026-02-19T15:04:49Z != '' ]] ++ [[ 2026-02-19T15:04:49Z != \n\u\l\l ]] ++ echo 2026-02-19T15:04:49Z + backup_time=2026-02-19T15:04:49Z + [[ 2026-02-19T15:04:49Z != \2\0\2\6\-\0\2\-\1\9\T\1\5\:\0\4\:\4\9\Z ]] + check_recovery backup-minio-3 latest '' -4th backupSource some-name + local backup_name=backup-minio-3 + local restore_type=latest + local restore_date= + local cmp_postfix=-4th + local backupSource=backupSource + local cluster_name=some-name + local selective_collection= + local restore_name=restore-backup-minio-3 + local restore_file=restore.yml + local cluster + cluster=some-name-rs0 + desc 'write more data before restore by latest' + set +o xtrace ----------------------------------------------------------------------------------- write more data before restore by latest ----------------------------------------------------------------------------------- + sleep 60 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.pitr-14308 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.pitr-14308 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.pitr-14308 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rOrQoBGXdN +++ mktemp ++ local LAST_ERR=/tmp/tmp.pC3qzEheis ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.rOrQoBGXdN ++ cat /tmp/tmp.pC3qzEheis ++ rm /tmp/tmp.rOrQoBGXdN /tmp/tmp.pC3qzEheis ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.heEj9mzKL6 ++ mktemp + local LAST_ERR=/tmp/tmp.o3FrAAOWz4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.heEj9mzKL6 Percona Server for MongoDB shell version v4.4.29-28 connecting to: 
mongodb://some-name-rs0-2.some-name-rs0.pitr-14308.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.pitr-14308.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.pitr-14308.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("180da890-c0f8-46fe-8c4a-0025303aa4c2") } Percona Server for MongoDB server version: v8.0.19-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.o3FrAAOWz4 + rm /tmp/tmp.heEj9mzKL6 /tmp/tmp.o3FrAAOWz4 + return 0 + '[' -n '' ']' + desc 'waiting for chunks to be uploaded' + set +o xtrace ----------------------------------------------------------------------------------- waiting for chunks to be uploaded ----------------------------------------------------------------------------------- + sleep 120 + desc 'check restore by latest' + set +o xtrace ----------------------------------------------------------------------------------- check restore by latest ----------------------------------------------------------------------------------- + '[' -z backupSource ']' ++ get_backup_dest backup-minio-3 ++ local backup_name=backup-minio-3 ++ kubectl_bin get psmdb-backup backup-minio-3 -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' ++ sed 's|s3://||' ++ sed 's|gs://||' ++ sed 's|azure://||' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GrSIRS6scf +++ mktemp ++ local LAST_ERR=/tmp/tmp.omcYtApoFF ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup backup-minio-3 -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.GrSIRS6scf ++ cat /tmp/tmp.omcYtApoFF ++ rm /tmp/tmp.GrSIRS6scf /tmp/tmp.omcYtApoFF ++ return 0 + backup_dest=operator-testing/pitr-prefix-1/2026-02-19T15:05:19Z + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-minio-3/' + /usr/sbin/sed -e /backupName/d + /usr/sbin/sed -e 's/clusterName:/clusterName: some-name/' + /usr/sbin/sed -e 's/pitrType:/type: latest/' + '[' -z '' ']' + /usr/sbin/sed -e /date:/d + /usr/sbin/sed -e 's|BACKUP-NAME|operator-testing/pitr-prefix-1/2026-02-19T15:05:19Z|' + kubectl_bin apply -f - ++ mktemp + '[' -n '' ']' + yq + local LAST_OUT=/tmp/tmp.4lqe0flveD ++ mktemp + local LAST_ERR=/tmp/tmp.bST5we8NFf + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.4lqe0flveD perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-3 created + cat /tmp/tmp.bST5we8NFf + rm /tmp/tmp.4lqe0flveD /tmp/tmp.bST5we8NFf + return 0 + wait_restore backup-minio-3 some-name + local backup_name=backup-minio-3 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-3 object to be created.OK Waiting psmdb-restore/restore-backup-minio-3 to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kgOAY2j0Tk +++ mktemp ++ local 
LAST_ERR=/tmp/tmp.IzEm1Q9nnW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.kgOAY2j0Tk ++ cat /tmp/tmp.IzEm1Q9nnW ++ rm /tmp/tmp.kgOAY2j0Tk /tmp/tmp.IzEm1Q9nnW ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + echo + set -o xtrace + '[' -n '' ']' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 -4th + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 + local postfix=-4th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T15:09:15+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xBQkyyv16A +++ mktemp ++ local LAST_ERR=/tmp/tmp.WRJMXg6jla ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xBQkyyv16A ++ cat /tmp/tmp.WRJMXg6jla ++ rm /tmp/tmp.xBQkyyv16A /tmp/tmp.WRJMXg6jla ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.nhxHT07oMc ++ mktemp + local LAST_ERR=/tmp/tmp.JODotfjf05 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.nhxHT07oMc + cat /tmp/tmp.JODotfjf05 + rm /tmp/tmp.nhxHT07oMc /tmp/tmp.JODotfjf05 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.TVPhAYVAUk/find-4th + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 -4th + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 + local postfix=-4th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 
'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T15:09:18+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aa3iZyQCXT +++ mktemp ++ local LAST_ERR=/tmp/tmp.zdKM7SUrt3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.aa3iZyQCXT ++ cat /tmp/tmp.zdKM7SUrt3 ++ rm /tmp/tmp.aa3iZyQCXT /tmp/tmp.zdKM7SUrt3 ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.xEe37dDKn7 ++ mktemp + local LAST_ERR=/tmp/tmp.nmimHz9kaf + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.xEe37dDKn7 + cat /tmp/tmp.nmimHz9kaf + rm /tmp/tmp.xEe37dDKn7 /tmp/tmp.nmimHz9kaf + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.TVPhAYVAUk/find-4th + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 -4th + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 + local postfix=-4th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T15:09:20+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 mongodb '' '' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + 
local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8BT1TDeNld +++ mktemp ++ local LAST_ERR=/tmp/tmp.KhzXJdOh2t ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8BT1TDeNld ++ cat /tmp/tmp.KhzXJdOh2t ++ rm /tmp/tmp.8BT1TDeNld /tmp/tmp.KhzXJdOh2t ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.aoPjMTnSdj ++ mktemp + local LAST_ERR=/tmp/tmp.sss13iF4s3 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.aoPjMTnSdj + cat /tmp/tmp.sss13iF4s3 + rm /tmp/tmp.aoPjMTnSdj /tmp/tmp.sss13iF4s3 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.TVPhAYVAUk/find-4th + desc 'restore pitr using backupSource on second cluster with another prefix in storage' + set +o xtrace ----------------------------------------------------------------------------------- restore pitr using backupSource on second cluster with another prefix in storage ----------------------------------------------------------------------------------- + second_cluster=cluster2-rs0 + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/conf/cluster2-rs0.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/conf/cluster2-rs0.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/conf/cluster2-rs0.yml ++ mktemp + yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod8.0"' + yq eval '(.spec | select(has("pmm"))).pmm.image = "percona/pmm-client:2.44.1-1"' + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-2247-091270b4"' + local LAST_OUT=/tmp/tmp.lTU7zjQ7vF + /usr/sbin/sed -e s/NAME_SPACE/pitr-14308/g + yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' ++ mktemp + yq eval '.spec.upgradeOptions.apply="Never"' + local LAST_ERR=/tmp/tmp.kiJpOGGcjI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.lTU7zjQ7vF perconaservermongodb.psmdb.percona.com/cluster2 created + cat /tmp/tmp.kiJpOGGcjI + rm /tmp/tmp.lTU7zjQ7vF /tmp/tmp.kiJpOGGcjI + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started 
----------------------------------------------------------------------------------- + wait_for_running cluster2-rs0 3 + local name=cluster2-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=cluster2 ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod cluster2-rs0-0 + local pod=cluster2-rs0-0 + set +o xtrace waiting for pod/cluster2-rs0-0 to be ready.........OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod cluster2-rs0-1 + local pod=cluster2-rs0-1 + set +o xtrace waiting for pod/cluster2-rs0-1 to be ready.......OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb cluster2 -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Np1q2tnqJA +++ mktemp ++ local LAST_ERR=/tmp/tmp.0JeVrLyC2R ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb cluster2 -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Np1q2tnqJA ++ cat /tmp/tmp.0JeVrLyC2R ++ rm /tmp/tmp.Np1q2tnqJA /tmp/tmp.0JeVrLyC2R ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod cluster2-rs0-2 + local pod=cluster2-rs0-2 + set +o xtrace waiting for pod/cluster2-rs0-2 to be ready......OK ++ kubectl_bin get psmdb cluster2 -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e0CWp8thHp +++ mktemp ++ local LAST_ERR=/tmp/tmp.fewKLgOVdq ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb cluster2 -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.e0CWp8thHp ++ cat /tmp/tmp.fewKLgOVdq ++ rm /tmp/tmp.e0CWp8thHp /tmp/tmp.fewKLgOVdq ++ return 0 + [[ '' == \t\r\u\e ]] ++ kubectl_bin get psmdb cluster2 -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5l4vmzmtd4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.lbR3SGA7Ca ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb cluster2 -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.5l4vmzmtd4 ++ cat /tmp/tmp.lbR3SGA7Ca ++ rm /tmp/tmp.5l4vmzmtd4 /tmp/tmp.lbR3SGA7Ca ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness.... 
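Note: the readiness gate traced above reduces to polling the PerconaServerMongoDB custom resource until .status.state reports "ready", exactly as the kubectl jsonpath calls in this log show. A minimal standalone sketch of that loop follows; the cluster and namespace names are taken from this run, while the five-minute deadline is an assumption of this sketch, not a value used by the test harness.

  #!/usr/bin/env bash
  # Poll the psmdb resource until the operator reports the cluster ready.
  # "cluster2" and "pitr-14308" come from this log; the 300s deadline is assumed.
  cluster=cluster2
  namespace=pitr-14308
  deadline=$((SECONDS + 300))
  until [[ "$(kubectl get psmdb "$cluster" -n "$namespace" \
      -o jsonpath='{.status.state}')" == "ready" ]]; do
    (( SECONDS < deadline )) || { echo "timed out waiting for $cluster" >&2; exit 1; }
    sleep 5
  done
  echo "psmdb/$cluster is ready"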
+ wait_backup_agent cluster2-rs0-0 + local agent_pod=cluster2-rs0-0 + set +o xtrace waiting for pbm-agent to be ready in cluster2-rs0-0...2026-02-19T15:10:47.000+0000 I listening for the commands + wait_backup_agent cluster2-rs0-1 + local agent_pod=cluster2-rs0-1 + set +o xtrace waiting for pbm-agent to be ready in cluster2-rs0-1...2026-02-19T15:10:57.000+0000 I listening for the commands + wait_backup_agent cluster2-rs0-2 + local agent_pod=cluster2-rs0-2 + set +o xtrace waiting for pbm-agent to be ready in cluster2-rs0-2...2026-02-19T15:10:59.000+0000 I listening for the commands + desc 'create user myApp' + set +o xtrace ----------------------------------------------------------------------------------- create user myApp ----------------------------------------------------------------------------------- + run_mongo 'db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' userAdmin:userAdmin123456@cluster2-rs0.pitr-14308 + local 'command=db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' + local uri=userAdmin:userAdmin123456@cluster2-rs0.pitr-14308 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ userAdmin:userAdmin123456@cluster2-rs0.pitr-14308 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.F20MDcNq3u +++ mktemp ++ local LAST_ERR=/tmp/tmp.hnYQyVliAt ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.F20MDcNq3u ++ cat /tmp/tmp.hnYQyVliAt ++ rm /tmp/tmp.F20MDcNq3u /tmp/tmp.hnYQyVliAt ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Ua6qPbdFCE ++ mktemp + local LAST_ERR=/tmp/tmp.WSBiFFxKRU + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Ua6qPbdFCE Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://cluster2-rs0-2.cluster2-rs0.pitr-14308.svc.cluster.local:27017,cluster2-rs0-1.cluster2-rs0.pitr-14308.svc.cluster.local:27017,cluster2-rs0-0.cluster2-rs0.pitr-14308.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("bc27f739-7adc-4ce2-bc2d-d9d6675e492d") } Percona Server for MongoDB server version: v8.0.19-7 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] } bye + cat /tmp/tmp.WSBiFFxKRU + rm /tmp/tmp.Ua6qPbdFCE /tmp/tmp.WSBiFFxKRU + return 0 + sleep 2 + check_recovery backup-minio-3 latest '' -4th backupSource cluster2 '' backup-minio-3-second-cluster restore2.yml 
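The check_recovery invocation above restores onto the second cluster through spec.backupSource rather than a backupName reference, which is what lets cluster2 consume a backup taken by the first cluster under a different storage prefix. The restore2.yml template itself is not included in the log; judging only from the sed substitutions traced below (name:, clusterName: cluster2, pitrType: rewritten to type: latest, the date: line dropped, and BACKUP-NAME replaced with the backup's destination), the rendered object plausibly looks like the following sketch. The s3:// scheme on the destination and the omission of any storage/credentials block are assumptions here, not facts confirmed by this run.

  # Hypothetical reconstruction of the applied restore manifest (field layout assumed).
  kubectl apply -n pitr-14308 -f - <<'EOF'
  apiVersion: psmdb.percona.com/v1
  kind: PerconaServerMongoDBRestore
  metadata:
    name: restore-backup-minio-3-second-cluster
  spec:
    clusterName: cluster2
    backupSource:
      destination: s3://operator-testing/pitr-prefix-1/2026-02-19T15:05:19Z
    pitr:
      type: latest
  EOF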
+ local backup_name=backup-minio-3 + local restore_type=latest + local restore_date= + local cmp_postfix=-4th + local backupSource=backupSource + local cluster_name=cluster2 + local selective_collection= + local restore_name=restore-backup-minio-3-second-cluster + local restore_file=restore2.yml + local cluster + cluster=cluster2-rs0 + desc 'write more data before restore by latest' + set +o xtrace ----------------------------------------------------------------------------------- write more data before restore by latest ----------------------------------------------------------------------------------- + sleep 60 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@cluster2-rs0.pitr-14308 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@cluster2-rs0.pitr-14308 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@cluster2-rs0.pitr-14308 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WgOGgMRPe8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.xMRTkcwK84 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.WgOGgMRPe8 ++ cat /tmp/tmp.xMRTkcwK84 ++ rm /tmp/tmp.WgOGgMRPe8 /tmp/tmp.xMRTkcwK84 ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.DCL1zbqm21 ++ mktemp + local LAST_ERR=/tmp/tmp.nPu2QXGgVR + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DCL1zbqm21 Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://cluster2-rs0-2.cluster2-rs0.pitr-14308.svc.cluster.local:27017,cluster2-rs0-1.cluster2-rs0.pitr-14308.svc.cluster.local:27017,cluster2-rs0-0.cluster2-rs0.pitr-14308.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("3da7a071-d101-4b49-8596-9e548ba60a96") } Percona Server for MongoDB server version: v8.0.19-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.nPu2QXGgVR + rm /tmp/tmp.DCL1zbqm21 /tmp/tmp.nPu2QXGgVR + return 0 + '[' -n '' ']' + desc 'waiting for chunks to be uploaded' + set +o xtrace ----------------------------------------------------------------------------------- waiting for chunks to be uploaded ----------------------------------------------------------------------------------- + sleep 120 + desc 'check restore by latest' + set +o xtrace ----------------------------------------------------------------------------------- check restore by latest ----------------------------------------------------------------------------------- + '[' -z backupSource ']' ++ get_backup_dest 
backup-minio-3 ++ local backup_name=backup-minio-3 ++ kubectl_bin get psmdb-backup backup-minio-3 -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' ++ sed 's|s3://||' ++ sed 's|azure://||' ++ sed 's|gs://||' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ANgYDv5bTe +++ mktemp ++ local LAST_ERR=/tmp/tmp.m1JwDDnZdf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup backup-minio-3 -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ANgYDv5bTe ++ cat /tmp/tmp.m1JwDDnZdf ++ rm /tmp/tmp.ANgYDv5bTe /tmp/tmp.m1JwDDnZdf ++ return 0 + backup_dest=operator-testing/pitr-prefix-1/2026-02-19T15:05:19Z + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/conf/restore2.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-minio-3-second-cluster/' + /usr/sbin/sed -e /backupName/d + /usr/sbin/sed -e 's/clusterName:/clusterName: cluster2/' + /usr/sbin/sed -e 's/pitrType:/type: latest/' + '[' -z '' ']' + /usr/sbin/sed -e /date:/d + /usr/sbin/sed -e 's|BACKUP-NAME|operator-testing/pitr-prefix-1/2026-02-19T15:05:19Z|' + '[' -n '' ']' + yq + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.hr0Kwunnj8 ++ mktemp + local LAST_ERR=/tmp/tmp.iIWRom506H + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.hr0Kwunnj8 perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-3-second-cluster created + cat /tmp/tmp.iIWRom506H + rm /tmp/tmp.hr0Kwunnj8 /tmp/tmp.iIWRom506H + return 0 + wait_restore backup-minio-3-second-cluster cluster2 + local backup_name=backup-minio-3-second-cluster + local cluster_name=cluster2 + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-3-second-cluster object to be created.OK Waiting psmdb-restore/restore-backup-minio-3-second-cluster to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency cluster2 + local cluster_name=cluster2 + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb cluster2 -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5VBKVaotQm +++ mktemp ++ local LAST_ERR=/tmp/tmp.LacZVPyvfX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb cluster2 -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.5VBKVaotQm ++ cat /tmp/tmp.LacZVPyvfX ++ rm /tmp/tmp.5VBKVaotQm /tmp/tmp.LacZVPyvfX ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + echo + set -o xtrace + '[' -n '' ']' + compare_mongo_cmd find myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-14308 -4th + local command=find + local uri=myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-14308 + local postfix=-4th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T15:14:39+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-14308 mongodb '' '' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error 
saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-14308 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3yFhAfL7jb +++ mktemp ++ local LAST_ERR=/tmp/tmp.Yc7j6FQ9zX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3yFhAfL7jb ++ cat /tmp/tmp.Yc7j6FQ9zX ++ rm /tmp/tmp.3yFhAfL7jb /tmp/tmp.Yc7j6FQ9zX ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.X0FcnR8nlV ++ mktemp + local LAST_ERR=/tmp/tmp.6nla9gv0n4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.X0FcnR8nlV + cat /tmp/tmp.6nla9gv0n4 + rm /tmp/tmp.X0FcnR8nlV /tmp/tmp.6nla9gv0n4 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.TVPhAYVAUk/find-4th + compare_mongo_cmd find myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-14308 -4th + local command=find + local uri=myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-14308 + local postfix=-4th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T15:14:42+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-14308 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-14308 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pG8z5EQ56f +++ mktemp ++ local LAST_ERR=/tmp/tmp.hGpNPZbGPP ++ local exit_status=0 ++ local 
timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.pG8z5EQ56f ++ cat /tmp/tmp.hGpNPZbGPP ++ rm /tmp/tmp.pG8z5EQ56f /tmp/tmp.hGpNPZbGPP ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.NWFlZoVuUn ++ mktemp + local LAST_ERR=/tmp/tmp.xVTEU3kJ1F + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.NWFlZoVuUn + cat /tmp/tmp.xVTEU3kJ1F + rm /tmp/tmp.NWFlZoVuUn /tmp/tmp.xVTEU3kJ1F + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.TVPhAYVAUk/find-4th + compare_mongo_cmd find myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-14308 -4th + local command=find + local uri=myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-14308 + local postfix=-4th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T15:14:44+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-14308 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-14308 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.R89b1pujCD +++ mktemp ++ local LAST_ERR=/tmp/tmp.lZLwX4WNuu ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.R89b1pujCD ++ cat /tmp/tmp.lZLwX4WNuu ++ rm /tmp/tmp.R89b1pujCD /tmp/tmp.lZLwX4WNuu ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.AMEV6nod88 ++ mktemp + local LAST_ERR=/tmp/tmp.oEEst9j9ot + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + 
kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.AMEV6nod88 + cat /tmp/tmp.oEEst9j9ot + rm /tmp/tmp.AMEV6nod88 /tmp/tmp.oEEst9j9ot + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.TVPhAYVAUk/find-4th + backup_name_minio_2=backup-minio2 + run_backup backup-minio2 0 + local name=backup-minio2 + local idx=0 + desc 'run backup backup-minio2-0' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio2-0 ----------------------------------------------------------------------------------- + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/conf/backup-minio2.yml + /usr/sbin/sed -e 's/name:/name: backup-minio2-0/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.mtIrErtqQN ++ mktemp + local LAST_ERR=/tmp/tmp.ob0lvgq4kI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.mtIrErtqQN perconaservermongodbbackup.psmdb.percona.com/backup-minio2-0 created + cat /tmp/tmp.ob0lvgq4kI + rm /tmp/tmp.mtIrErtqQN /tmp/tmp.ob0lvgq4kI + return 0 + wait_backup backup-minio2-0 + local backup_name=backup-minio2-0 + local target_state=ready + set +o xtrace waiting for backup-minio2-0 to reach ready state.......OK ++ run_mongo 'new Date().toISOString()' myApp:myPass@cluster2-rs0.pitr-14308 mongodb '' --quiet ++ local 'command=new Date().toISOString()' ++ local uri=myApp:myPass@cluster2-rs0.pitr-14308 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ local replica_set=rs0 ++ [[ myApp:myPass@cluster2-rs0.pitr-14308 == *cfg* ]] +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp ++ grep -E -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match' ++ cut -c1-19 ++ tr T ' ' +++ local LAST_OUT=/tmp/tmp.jv5QiEIl7F ++++ mktemp +++ local LAST_ERR=/tmp/tmp.OFG0gg3NLa +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.jv5QiEIl7F +++ cat /tmp/tmp.OFG0gg3NLa +++ rm /tmp/tmp.jv5QiEIl7F /tmp/tmp.OFG0gg3NLa +++ return 0 ++ local client_container=psmdb-client-86cb5d8484-r5b58 ++ kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''new Date().toISOString()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XJjt4jXcZe +++ mktemp ++ local LAST_ERR=/tmp/tmp.J8jZV743oT ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''new Date().toISOString()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.XJjt4jXcZe ++ cat 
/tmp/tmp.J8jZV743oT ++ rm /tmp/tmp.XJjt4jXcZe /tmp/tmp.J8jZV743oT ++ return 0 + time_now='2026-02-19 15:15:02' + write_document -5th cluster2-rs0 + local cmp_postfix=-5th + local cluster_name=cluster2-rs0 + desc 'write initial data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write initial data, read from all ----------------------------------------------------------------------------------- + run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@cluster2-rs0.pitr-14308 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@cluster2-rs0.pitr-14308 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@cluster2-rs0.pitr-14308 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KpvjSpBkDC +++ mktemp ++ local LAST_ERR=/tmp/tmp.MC5XVVu1mU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.KpvjSpBkDC ++ cat /tmp/tmp.MC5XVVu1mU ++ rm /tmp/tmp.KpvjSpBkDC /tmp/tmp.MC5XVVu1mU ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Qbu0DohqPF ++ mktemp + local LAST_ERR=/tmp/tmp.gjnSbN65I4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Qbu0DohqPF Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://cluster2-rs0-2.cluster2-rs0.pitr-14308.svc.cluster.local:27017,cluster2-rs0-1.cluster2-rs0.pitr-14308.svc.cluster.local:27017,cluster2-rs0-0.cluster2-rs0.pitr-14308.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("fef0f8db-5cda-440e-8461-a024722c9f74") } Percona Server for MongoDB server version: v8.0.19-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.gjnSbN65I4 + rm /tmp/tmp.Qbu0DohqPF /tmp/tmp.gjnSbN65I4 + return 0 + minikube_sleep + sleep_time=10 + [[ '' == 1 ]] + compare_mongo_cmd find myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-14308 -5th + local command=find + local uri=myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-14308 + local postfix=-5th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T15:15:05+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-14308 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local 
uri=myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-14308 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fnjEirOmci +++ mktemp ++ local LAST_ERR=/tmp/tmp.43v71izNTV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.fnjEirOmci ++ cat /tmp/tmp.43v71izNTV ++ rm /tmp/tmp.fnjEirOmci /tmp/tmp.43v71izNTV ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.3ZPdSTLcSp ++ mktemp + local LAST_ERR=/tmp/tmp.C32NlzS5Ms + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.3ZPdSTLcSp + cat /tmp/tmp.C32NlzS5Ms + rm /tmp/tmp.3ZPdSTLcSp /tmp/tmp.C32NlzS5Ms + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find-5th.json /tmp/tmp.TVPhAYVAUk/find-5th + compare_mongo_cmd find myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-14308 -5th + local command=find + local uri=myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-14308 + local postfix=-5th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T15:15:07+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-14308 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-14308 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4nZAbeKts2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.PN0B1Oahwd ++ local exit_status=0 ++ local 
timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.4nZAbeKts2 ++ cat /tmp/tmp.PN0B1Oahwd ++ rm /tmp/tmp.4nZAbeKts2 /tmp/tmp.PN0B1Oahwd ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Or330bhDvS ++ mktemp + local LAST_ERR=/tmp/tmp.fY34YuRsKr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Or330bhDvS + cat /tmp/tmp.fY34YuRsKr + rm /tmp/tmp.Or330bhDvS /tmp/tmp.fY34YuRsKr + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find-5th.json /tmp/tmp.TVPhAYVAUk/find-5th + compare_mongo_cmd find myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-14308 -5th + local command=find + local uri=myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-14308 + local postfix=-5th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T15:15:09+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-14308 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local replica_set=rs0 + [[ myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-14308 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VHRVJ0zIvr +++ mktemp ++ local LAST_ERR=/tmp/tmp.HX47ARstSm ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.VHRVJ0zIvr ++ cat /tmp/tmp.HX47ARstSm ++ rm /tmp/tmp.VHRVJ0zIvr /tmp/tmp.HX47ARstSm ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.qP5eDAdsSq ++ mktemp + local LAST_ERR=/tmp/tmp.ZlT2cveu01 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + 
kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.qP5eDAdsSq + cat /tmp/tmp.ZlT2cveu01 + rm /tmp/tmp.qP5eDAdsSq /tmp/tmp.ZlT2cveu01 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find-5th.json /tmp/tmp.TVPhAYVAUk/find-5th + sleep 2 + check_recovery backup-minio2-0 date '2026-02-19 15:15:02' -4th '' cluster2 + local backup_name=backup-minio2-0 + local restore_type=date + local 'restore_date=2026-02-19 15:15:02' + local cmp_postfix=-4th + local backupSource= + local cluster_name=cluster2 + local selective_collection= + local restore_name=restore-backup-minio2-0 + local restore_file=restore.yml + local cluster + cluster=cluster2-rs0 + desc 'write more data before restore by date' + set +o xtrace ----------------------------------------------------------------------------------- write more data before restore by date ----------------------------------------------------------------------------------- + sleep 60 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@cluster2-rs0.pitr-14308 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@cluster2-rs0.pitr-14308 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@cluster2-rs0.pitr-14308 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tvPm1t9pdq +++ mktemp ++ local LAST_ERR=/tmp/tmp.dQvnQXXSJG ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.tvPm1t9pdq ++ cat /tmp/tmp.dQvnQXXSJG ++ rm /tmp/tmp.tvPm1t9pdq /tmp/tmp.dQvnQXXSJG ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.F48aY70bUN ++ mktemp + local LAST_ERR=/tmp/tmp.ZxWpUIu2GU + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.F48aY70bUN Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://cluster2-rs0-2.cluster2-rs0.pitr-14308.svc.cluster.local:27017,cluster2-rs0-1.cluster2-rs0.pitr-14308.svc.cluster.local:27017,cluster2-rs0-0.cluster2-rs0.pitr-14308.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("a7a321eb-8afb-49e2-8060-98816006d3d5") } Percona Server for MongoDB server version: v8.0.19-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.ZxWpUIu2GU + rm /tmp/tmp.F48aY70bUN 
/tmp/tmp.ZxWpUIu2GU + return 0 + '[' -n '' ']' + desc 'waiting for chunks to be uploaded' + set +o xtrace ----------------------------------------------------------------------------------- waiting for chunks to be uploaded ----------------------------------------------------------------------------------- + sleep 120 + desc 'check restore by date' + set +o xtrace ----------------------------------------------------------------------------------- check restore by date ----------------------------------------------------------------------------------- + '[' -z '' ']' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-minio2-0/' + /usr/sbin/sed -e 's/backupName:/backupName: backup-minio2-0/' + /usr/sbin/sed -e 's/clusterName:/clusterName: cluster2/' + /usr/sbin/sed -e 's/pitrType:/type: date/' + '[' -z '2026-02-19 15:15:02' ']' + /usr/sbin/sed -e 's/date:/date: 2026-02-19 15:15:02/' + /usr/sbin/sed -e /backupSource/,+2d + '[' -n '' ']' + yq + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.2h0oWuaKoB ++ mktemp + local LAST_ERR=/tmp/tmp.bZ60uu3cJS + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.2h0oWuaKoB perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio2-0 created + cat /tmp/tmp.bZ60uu3cJS + rm /tmp/tmp.2h0oWuaKoB /tmp/tmp.bZ60uu3cJS + return 0 + wait_restore backup-minio2-0 cluster2 + local backup_name=backup-minio2-0 + local cluster_name=cluster2 + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio2-0 object to be created.OK Waiting psmdb-restore/restore-backup-minio2-0 to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency cluster2 + local cluster_name=cluster2 + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb cluster2 -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZEQSGSmhW1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ymAwXxF6iS ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb cluster2 -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ZEQSGSmhW1 ++ cat /tmp/tmp.ymAwXxF6iS ++ rm /tmp/tmp.ZEQSGSmhW1 /tmp/tmp.ymAwXxF6iS ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + echo + set -o xtrace + '[' -n '' ']' + compare_mongo_cmd find myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-14308 -4th + local command=find + local uri=myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-14308 + local postfix=-4th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T15:18:49+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-14308 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ 
myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-14308 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nm0djxKJZh +++ mktemp ++ local LAST_ERR=/tmp/tmp.SpRYGzPQfz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.nm0djxKJZh ++ cat /tmp/tmp.SpRYGzPQfz ++ rm /tmp/tmp.nm0djxKJZh /tmp/tmp.SpRYGzPQfz ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.r22dwxuIQa ++ mktemp + local LAST_ERR=/tmp/tmp.4xfvu4rCu3 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-0.cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.r22dwxuIQa + cat /tmp/tmp.4xfvu4rCu3 + rm /tmp/tmp.r22dwxuIQa /tmp/tmp.4xfvu4rCu3 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.TVPhAYVAUk/find-4th + compare_mongo_cmd find myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-14308 -4th + local command=find + local uri=myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-14308 + local postfix=-4th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T15:18:52+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-14308 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-14308 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w1Eea1Tb5b +++ mktemp ++ local LAST_ERR=/tmp/tmp.MkoPMFB58x ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set 
-e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.w1Eea1Tb5b ++ cat /tmp/tmp.MkoPMFB58x ++ rm /tmp/tmp.w1Eea1Tb5b /tmp/tmp.MkoPMFB58x ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.yjdutLsSJv ++ mktemp + local LAST_ERR=/tmp/tmp.2761v1icTc + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-1.cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.yjdutLsSJv + cat /tmp/tmp.2761v1icTc + rm /tmp/tmp.yjdutLsSJv /tmp/tmp.2761v1icTc + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.TVPhAYVAUk/find-4th + compare_mongo_cmd find myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-14308 -4th + local command=find + local uri=myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-14308 + local postfix=-4th + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-02-19T15:18:54+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-14308 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-14308 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-14308 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6xRgqMjNhL +++ mktemp ++ local LAST_ERR=/tmp/tmp.b0aAFnXpfz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.6xRgqMjNhL ++ cat /tmp/tmp.b0aAFnXpfz ++ rm /tmp/tmp.6xRgqMjNhL /tmp/tmp.b0aAFnXpfz ++ return 0 + local client_container=psmdb-client-86cb5d8484-r5b58 + kubectl_bin exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.LxywbbQus7 ++ mktemp + local LAST_ERR=/tmp/tmp.ykJzknFCiN + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-86cb5d8484-r5b58 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo 
mongodb://myApp:myPass@cluster2-rs0-2.cluster2-rs0.pitr-14308.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.LxywbbQus7
+ cat /tmp/tmp.ykJzknFCiN
+ rm /tmp/tmp.LxywbbQus7 /tmp/tmp.ykJzknFCiN
+ return 0
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/e2e-tests/pitr/compare/find-4th.json /tmp/tmp.TVPhAYVAUk/find-4th
+ kubectl patch psmdb some-name --type=merge --patch '{"spec": {"backup": {"pitr": {"enabled": false}}}}'
perconaservermongodb.psmdb.percona.com/some-name patched
+ kubectl patch psmdb cluster2 --type=merge --patch '{"spec": {"backup": {"pitr": {"enabled": false}}}}'
perconaservermongodb.psmdb.percona.com/cluster2 patched
+ sleep 20
+ kubectl_bin delete psmdb-backup --all
++ mktemp
+ local LAST_OUT=/tmp/tmp.Bj2V0I5R5e
++ mktemp
+ local LAST_ERR=/tmp/tmp.9tXUzHSMcM
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete psmdb-backup --all
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.Bj2V0I5R5e
perconaservermongodbbackup.psmdb.percona.com "backup-minio-0" deleted from pitr-14308 namespace
perconaservermongodbbackup.psmdb.percona.com "backup-minio-1" deleted from pitr-14308 namespace
perconaservermongodbbackup.psmdb.percona.com "backup-minio-2" deleted from pitr-14308 namespace
perconaservermongodbbackup.psmdb.percona.com "backup-minio-3" deleted from pitr-14308 namespace
perconaservermongodbbackup.psmdb.percona.com "backup-minio2-0" deleted from pitr-14308 namespace
+ cat /tmp/tmp.9tXUzHSMcM
+ rm /tmp/tmp.Bj2V0I5R5e /tmp/tmp.9tXUzHSMcM
+ return 0
+ destroy pitr-14308
+ local namespace=pitr-14308
+ local ignore_logs=true
+ [[ 0 == 1 ]]
+ desc 'destroy cluster/operator and all other resources'
+ set +o xtrace
-----------------------------------------------------------------------------------
destroy cluster/operator and all other resources
-----------------------------------------------------------------------------------
+ '[' true == false ']'
+ delete_backups
+ desc 'Delete psmdb-backup'
+ set +o xtrace
-----------------------------------------------------------------------------------
Delete psmdb-backup
-----------------------------------------------------------------------------------
++ kubectl_bin get psmdb-backup --no-headers
++ wc -l
+++ mktemp
++ local LAST_OUT=/tmp/tmp.Rk5jYGw6wM
+++ mktemp
++ local LAST_ERR=/tmp/tmp.7TU4mwtjMc
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get psmdb-backup --no-headers
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.Rk5jYGw6wM
++ cat /tmp/tmp.7TU4mwtjMc
No resources found in pitr-14308 namespace.
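# Note: the two PerconaServerMongoDBRestore objects applied earlier in this run are rendered by piping
# template files (restore.yml / restore2.yml) through sed, so the final manifests never appear in the trace.
# The snippets below are a minimal, hypothetical reconstruction for readability only: the resource names,
# cluster name, restore date and backup destination are copied from the trace above, while the apiVersion,
# the exact field nesting and the backupSource sub-fields are assumptions and may be incomplete.

# Assumed rendering of the restore-by-date object (check_recovery backup-minio2-0 date '2026-02-19 15:15:02'):
cat <<'EOF' | kubectl apply -f -
apiVersion: psmdb.percona.com/v1
kind: PerconaServerMongoDBRestore
metadata:
  name: restore-backup-minio2-0
spec:
  clusterName: cluster2
  backupName: backup-minio2-0
  pitr:
    type: date
    date: "2026-02-19 15:15:02"
EOF

# Assumed rendering of the latest-point restore on the second cluster, driven by backupSource instead of
# backupName (the scheme prefix on destination is an assumption; the trace substitutes the path without it):
cat <<'EOF' | kubectl apply -f -
apiVersion: psmdb.percona.com/v1
kind: PerconaServerMongoDBRestore
metadata:
  name: restore-backup-minio-3-second-cluster
spec:
  clusterName: cluster2
  pitr:
    type: latest
  backupSource:
    destination: s3://operator-testing/pitr-prefix-1/2026-02-19T15:05:19Z
EOF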
++ rm /tmp/tmp.Rk5jYGw6wM /tmp/tmp.7TU4mwtjMc ++ return 0 + '[' 0 '!=' 0 ']' + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.Z1yntk4SdP ++ mktemp + local LAST_ERR=/tmp/tmp.5JkGYBD4ee + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Z1yntk4SdP customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.5JkGYBD4ee + rm /tmp/tmp.Z1yntk4SdP /tmp/tmp.5JkGYBD4ee + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/deploy/crd.yaml ++ grep -v '\-\-\-' grep: warning: stray \ before - grep: warning: stray \ before - + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.Tjgr76RzaH ++ mktemp + local LAST_ERR=/tmp/tmp.ANZYphHTSu + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Tjgr76RzaH + cat /tmp/tmp.ANZYphHTSu + rm /tmp/tmp.Tjgr76RzaH /tmp/tmp.ANZYphHTSu + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.ylRCAexEZp ++ mktemp + local LAST_ERR=/tmp/tmp.Nz4s40xKWP + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + 
break + cat /tmp/tmp.ylRCAexEZp + cat /tmp/tmp.Nz4s40xKWP + rm /tmp/tmp.ylRCAexEZp /tmp/tmp.Nz4s40xKWP + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.ltpHfiKepa ++ mktemp + local LAST_ERR=/tmp/tmp.S4fjL0tlJx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ltpHfiKepa + cat /tmp/tmp.S4fjL0tlJx + rm /tmp/tmp.ltpHfiKepa /tmp/tmp.S4fjL0tlJx + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.VtlFJ4o0Oz ++ mktemp + local LAST_ERR=/tmp/tmp.IUzsFQAFz4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2247/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.VtlFJ4o0Oz clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.IUzsFQAFz4 + rm /tmp/tmp.VtlFJ4o0Oz /tmp/tmp.IUzsFQAFz4 + return 0 + destroy_cert_manager + kubectl_bin delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.QwRsmyHD9F ++ mktemp + local LAST_ERR=/tmp/tmp.VH1wKo4ite + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.QwRsmyHD9F + cat /tmp/tmp.VH1wKo4ite Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.QwRsmyHD9F + cat /tmp/tmp.VH1wKo4ite Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from 
server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error 
when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 4 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.QwRsmyHD9F + cat 
+ sleep 8
+ cat /tmp/tmp.QwRsmyHD9F
+ cat /tmp/tmp.VH1wKo4ite
"cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + rm /tmp/tmp.QwRsmyHD9F /tmp/tmp.VH1wKo4ite + return 1 + true + '[' -n '' ']' + '[' -n psmdb-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace pitr-14308 + rm -rf /tmp/tmp.TVPhAYVAUk + kubectl_bin delete --grace-period=0 --force=true namespace psmdb-operator ++ mktemp ++ mktemp + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.zdpw2lEKlb + local LAST_OUT=/tmp/tmp.tH7FvqtjxI ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.jXRn4axg6Y + local exit_status=0 + local timeout=4 + local LAST_ERR=/tmp/tmp.DTU2Ua9xiA + local exit_status=0 + local timeout=4 ++ seq 0 2 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete --grace-period=0 --force=true namespace pitr-14308 + for i in $(seq 0 2) + set +e + kubectl delete --grace-period=0 --force=true namespace psmdb-operator